commit stringlengths 40 40 | old_file stringlengths 4 237 | new_file stringlengths 4 237 | old_contents stringlengths 1 4.24k | new_contents stringlengths 5 4.84k | subject stringlengths 15 778 | message stringlengths 16 6.86k | lang stringlengths 1 30 | license stringclasses 13 values | repos stringlengths 5 116k | config stringlengths 1 30 | content stringlengths 105 8.72k |
|---|---|---|---|---|---|---|---|---|---|---|---|
2bab85d2952eedde04cdcbe2dc8a35312fd18b29 | src/search/searchresults.js | src/search/searchresults.js | import PropTypes from 'prop-types';
import React, { Fragment } from 'react';
import AUtextInput from '@gov.au/text-inputs';
/**
* The Search Results component
*
* @disable-docs
*/
const SearchResults = ( page ) => {
return (
<div className="container-fluid au-body">
<div className="row">
<div className="col-xs-12 searchresults__list">
<h2 className="au-display-xxl">{ page.heading }</h2>
<p><span id="searchresults__count" /> results for [query]</p>
<form className="search__searchbox" role="search" autoComplete="off" action="/search" method="get">
<input type="search" className="au-text-input" name="q" id="text-input" placeholder="Digital Guides"/>
<button type="submit" className="au-btn au-btn--light icon icon--search--dark search__button">Search</button>
</form>
<ul className="searchresults__ul" id="searchresults__resultslist"></ul>
</div>
</div>
</div>
);
}
SearchResults.propTypes = {
/**
* _body: (partials)(4)
*/
_body: PropTypes.node.isRequired,
};
SearchResults.defaultProps = {};
export default SearchResults;
| import PropTypes from 'prop-types';
import React, { Fragment } from 'react';
import AUtextInput from '@gov.au/text-inputs';
/**
* The Search Results component
*
* @disable-docs
*/
const SearchResults = ( page ) => {
return (
<div className="container-fluid au-body">
<div className="row">
<div className="col-xs-12 searchresults__list">
<h2 className="au-display-xxl">{ page.heading }</h2>
<p><span id="searchresults__count" /> results for [query]</p>
<div className="row">
<div className="col-xs-12 col-sm-6 col-md-5">
<form className="search__searchbox" role="search" autoComplete="off" action="/search" method="get">
<input type="search" className="au-text-input round--left" name="q" id="search-input" placeholder="Digital Guides"/>
<button type="submit" className="au-btn icon icon--search--dark round--right" id="search-btn">Search</button>
</form>
</div>
</div>
<ul className="searchresults__ul" id="searchresults__resultslist"></ul>
</div>
</div>
</div>
);
}
SearchResults.propTypes = {
/**
* _body: (partials)(4)
*/
_body: PropTypes.node.isRequired,
};
SearchResults.defaultProps = {};
export default SearchResults;
| Update width of search box | Update width of search box
Minor fixes to classes, add new rounding classes for search box and button to achieve a neat join effect.
| JavaScript | mit | govau/service-manual | javascript | ## Code Before:
import PropTypes from 'prop-types';
import React, { Fragment } from 'react';
import AUtextInput from '@gov.au/text-inputs';
/**
* The Search Results component
*
* @disable-docs
*/
const SearchResults = ( page ) => {
return (
<div className="container-fluid au-body">
<div className="row">
<div className="col-xs-12 searchresults__list">
<h2 className="au-display-xxl">{ page.heading }</h2>
<p><span id="searchresults__count" /> results for [query]</p>
<form className="search__searchbox" role="search" autoComplete="off" action="/search" method="get">
<input type="search" className="au-text-input" name="q" id="text-input" placeholder="Digital Guides"/>
<button type="submit" className="au-btn au-btn--light icon icon--search--dark search__button">Search</button>
</form>
<ul className="searchresults__ul" id="searchresults__resultslist"></ul>
</div>
</div>
</div>
);
}
SearchResults.propTypes = {
/**
* _body: (partials)(4)
*/
_body: PropTypes.node.isRequired,
};
SearchResults.defaultProps = {};
export default SearchResults;
## Instruction:
Update width of search box
Minor fixes to classes, add new rounding classes for search box and button to achieve a neat join effect.
## Code After:
import PropTypes from 'prop-types';
import React, { Fragment } from 'react';
import AUtextInput from '@gov.au/text-inputs';
/**
* The Search Results component
*
* @disable-docs
*/
const SearchResults = ( page ) => {
return (
<div className="container-fluid au-body">
<div className="row">
<div className="col-xs-12 searchresults__list">
<h2 className="au-display-xxl">{ page.heading }</h2>
<p><span id="searchresults__count" /> results for [query]</p>
<div className="row">
<div className="col-xs-12 col-sm-6 col-md-5">
<form className="search__searchbox" role="search" autoComplete="off" action="/search" method="get">
<input type="search" className="au-text-input round--left" name="q" id="search-input" placeholder="Digital Guides"/>
<button type="submit" className="au-btn icon icon--search--dark round--right" id="search-btn">Search</button>
</form>
</div>
</div>
<ul className="searchresults__ul" id="searchresults__resultslist"></ul>
</div>
</div>
</div>
);
}
SearchResults.propTypes = {
/**
* _body: (partials)(4)
*/
_body: PropTypes.node.isRequired,
};
SearchResults.defaultProps = {};
export default SearchResults;
|
832fecfe5bfc8951c0d302c2f913a81acfbc657c | solarnmf_main_ts.py | solarnmf_main_ts.py | import solarnmf_functions as snf
import solarnmf_plot_routines as spr
#Read in and format the time series
results = snf.make_t_matrix("simulation",format="timeseries",filename='/home/wtb2/Desktop/gaussian_test.dat')
#Get the dimensions of the T matrix
ny,nx = results['T'].shape
#Set the number of guessed sources
Q = 10
#Initialize the U, V, and A matrices
uva_initial = snf.initialize_uva(nx,ny,Q,5,5,results['T'])
#Start the minimizer
min_results = snf.minimize_div(uva_initial['u'],uva_initial['v'],results['T'],uva_initial['A'],200,1.0e-5)
#Show the initial and final matrices side-by-side
spr.plot_mat_obsVpred(results['T'],min_results['A'])
#Show the initial and final 1d time series curves
spr.plot_ts_obsVpred(results['x'],min_results['A'])
#Show the constituents of the time series on top of the original vector
spr.plot_ts_reconstruction(results['x'],min_results['u'],min_results['v'])
| import solarnmf_functions as snf
import solarnmf_plot_routines as spr
#Read in and format the time series
results = snf.make_t_matrix("simulation",format="timeseries",nx=100,ny=100,p=10,filename='/home/wtb2/Desktop/gaussian_test.dat')
#Get the dimensions of the T matrix
ny,nx = results['T'].shape
#Set the number of guessed sources
Q = 10
#Initialize the U, V, and A matrices
uva_initial = snf.initialize_uva(nx,ny,Q,5,10,results['T'])
#Start the minimizer
min_results = snf.minimize_div(uva_initial['u'],uva_initial['v'],results['T'],uva_initial['A'],100,1.0e-5)
#Show the initial and final matrices side-by-side
spr.plot_mat_obsVpred(results['T'],min_results['A'])
#Show the initial and final 1d time series curves
spr.plot_ts_obsVpred(results['x'],min_results['A'])
#Show the constituents of the time series on top of the original vector
spr.plot_ts_reconstruction(results['x'],min_results['u'],min_results['v'])
| Fix for input options in make_t_matrix function | Fix for input options in make_t_matrix function
| Python | mit | wtbarnes/solarnmf | python | ## Code Before:
import solarnmf_functions as snf
import solarnmf_plot_routines as spr
#Read in and format the time series
results = snf.make_t_matrix("simulation",format="timeseries",filename='/home/wtb2/Desktop/gaussian_test.dat')
#Get the dimensions of the T matrix
ny,nx = results['T'].shape
#Set the number of guessed sources
Q = 10
#Initialize the U, V, and A matrices
uva_initial = snf.initialize_uva(nx,ny,Q,5,5,results['T'])
#Start the minimizer
min_results = snf.minimize_div(uva_initial['u'],uva_initial['v'],results['T'],uva_initial['A'],200,1.0e-5)
#Show the initial and final matrices side-by-side
spr.plot_mat_obsVpred(results['T'],min_results['A'])
#Show the initial and final 1d time series curves
spr.plot_ts_obsVpred(results['x'],min_results['A'])
#Show the constituents of the time series on top of the original vector
spr.plot_ts_reconstruction(results['x'],min_results['u'],min_results['v'])
## Instruction:
Fix for input options in make_t_matrix function
## Code After:
import solarnmf_functions as snf
import solarnmf_plot_routines as spr
#Read in and format the time series
results = snf.make_t_matrix("simulation",format="timeseries",nx=100,ny=100,p=10,filename='/home/wtb2/Desktop/gaussian_test.dat')
#Get the dimensions of the T matrix
ny,nx = results['T'].shape
#Set the number of guessed sources
Q = 10
#Initialize the U, V, and A matrices
uva_initial = snf.initialize_uva(nx,ny,Q,5,10,results['T'])
#Start the minimizer
min_results = snf.minimize_div(uva_initial['u'],uva_initial['v'],results['T'],uva_initial['A'],100,1.0e-5)
#Show the initial and final matrices side-by-side
spr.plot_mat_obsVpred(results['T'],min_results['A'])
#Show the initial and final 1d time series curves
spr.plot_ts_obsVpred(results['x'],min_results['A'])
#Show the constituents of the time series on top of the original vector
spr.plot_ts_reconstruction(results['x'],min_results['u'],min_results['v'])
|
c3ac1a2085b0613635eccb50fc81cc60dd12c486 | .travis.yml | .travis.yml | language: cpp
compiler:
- gcc
- clang
script: cmake -DCMAKE_INSTALL_PREFIX=. && make && make install && tar czf vsrd-Linux-x64.tar.gz -C bin vsrd
| language: cpp
compiler:
- gcc
- clang
sudo: false
script: cmake -DCMAKE_INSTALL_PREFIX=. && make && make install && tar czf vsrd-Linux-x64.tar.gz -C bin vsrd
| Use new Travis CI infrastructure. | Use new Travis CI infrastructure.
| YAML | mit | DoomHammer/Very-Simple-Resource-Dumper,DoomHammer/Very-Simple-Resource-Dumper,DoomHammer/Very-Simple-Resource-Dumper,DoomHammer/Very-Simple-Resource-Dumper | yaml | ## Code Before:
language: cpp
compiler:
- gcc
- clang
script: cmake -DCMAKE_INSTALL_PREFIX=. && make && make install && tar czf vsrd-Linux-x64.tar.gz -C bin vsrd
## Instruction:
Use new Travis CI infrastructure.
## Code After:
language: cpp
compiler:
- gcc
- clang
sudo: false
script: cmake -DCMAKE_INSTALL_PREFIX=. && make && make install && tar czf vsrd-Linux-x64.tar.gz -C bin vsrd
|
afe792e50e6e30036f1ed718d7c3f5143a1e2da5 | adhocracy4/follows/signals.py | adhocracy4/follows/signals.py | from django.conf import settings
from django.db.models.signals import post_save
from . import models
def autofollow_hook(instance, **kwargs):
if hasattr(instance.project, 'id'):
models.Follow.objects.get_or_create(
project=instance.project,
creator=instance.creator,
defaults={
'enabled': True,
})
for model in settings.A4_AUTO_FOLLOWABLES:
post_save.connect(autofollow_hook, model)
| from django.apps import apps
from django.conf import settings
from django.db.models.signals import post_save
from . import models
def autofollow_hook(instance, **kwargs):
if hasattr(instance.project, 'id'):
models.Follow.objects.get_or_create(
project=instance.project,
creator=instance.creator,
defaults={
'enabled': True,
})
for app, model in settings.A4_AUTO_FOLLOWABLES:
post_save.connect(autofollow_hook, apps.get_model(app, model))
| Fix setting up AUTO_FOLLOWABLES models | Fix setting up AUTO_FOLLOWABLES models
Note that `Signal.connect` expects the model class as the sender
argument.
Altough while using e.g. `post_save` it also works with a string
`"apname.model"`
| Python | agpl-3.0 | liqd/adhocracy4,liqd/adhocracy4,liqd/adhocracy4,liqd/adhocracy4 | python | ## Code Before:
from django.conf import settings
from django.db.models.signals import post_save
from . import models
def autofollow_hook(instance, **kwargs):
if hasattr(instance.project, 'id'):
models.Follow.objects.get_or_create(
project=instance.project,
creator=instance.creator,
defaults={
'enabled': True,
})
for model in settings.A4_AUTO_FOLLOWABLES:
post_save.connect(autofollow_hook, model)
## Instruction:
Fix setting up AUTO_FOLLOWABLES models
Note that `Signal.connect` expects the model class as the sender
argument.
Altough while using e.g. `post_save` it also works with a string
`"apname.model"`
## Code After:
from django.apps import apps
from django.conf import settings
from django.db.models.signals import post_save
from . import models
def autofollow_hook(instance, **kwargs):
if hasattr(instance.project, 'id'):
models.Follow.objects.get_or_create(
project=instance.project,
creator=instance.creator,
defaults={
'enabled': True,
})
for app, model in settings.A4_AUTO_FOLLOWABLES:
post_save.connect(autofollow_hook, apps.get_model(app, model))
|
f9c54bbe8b92e1e05a93e66850cf143ccebe2811 | .travis.yml | .travis.yml | language: ruby
rvm:
- 2.2
- 2.3
- jruby-9.0.1.0
- jruby-9.0.5.0
gemfile:
- gemfiles/Gemfile.sidekiq-3.1.0
- gemfiles/Gemfile.sidekiq-3.3.4
- gemfiles/Gemfile.sidekiq-4.0.2
script: bundle exec rake test
addons:
code_climate:
repo_token: b11e6d8ed83dd2e01424b088c469e2cef525a89e887414f81e6ee7f36b937a1d
| language: ruby
rvm:
- 2.2
- 2.3
- jruby-9.0.1.0
- jruby-9.0.5.0
gemfile:
- gemfiles/Gemfile.sidekiq-2.17.7
- gemfiles/Gemfile.sidekiq-3.1.0
- gemfiles/Gemfile.sidekiq-3.3.4
- gemfiles/Gemfile.sidekiq-4.0.2
script: bundle exec rake test
addons:
code_climate:
repo_token: b11e6d8ed83dd2e01424b088c469e2cef525a89e887414f81e6ee7f36b937a1d
| Add Sidekiq 2.17 Gemfile to the list | Add Sidekiq 2.17 Gemfile to the list
| YAML | mit | NuckChorris/sidekiq-debounce,hummingbird-me/sidekiq-debounce | yaml | ## Code Before:
language: ruby
rvm:
- 2.2
- 2.3
- jruby-9.0.1.0
- jruby-9.0.5.0
gemfile:
- gemfiles/Gemfile.sidekiq-3.1.0
- gemfiles/Gemfile.sidekiq-3.3.4
- gemfiles/Gemfile.sidekiq-4.0.2
script: bundle exec rake test
addons:
code_climate:
repo_token: b11e6d8ed83dd2e01424b088c469e2cef525a89e887414f81e6ee7f36b937a1d
## Instruction:
Add Sidekiq 2.17 Gemfile to the list
## Code After:
language: ruby
rvm:
- 2.2
- 2.3
- jruby-9.0.1.0
- jruby-9.0.5.0
gemfile:
- gemfiles/Gemfile.sidekiq-2.17.7
- gemfiles/Gemfile.sidekiq-3.1.0
- gemfiles/Gemfile.sidekiq-3.3.4
- gemfiles/Gemfile.sidekiq-4.0.2
script: bundle exec rake test
addons:
code_climate:
repo_token: b11e6d8ed83dd2e01424b088c469e2cef525a89e887414f81e6ee7f36b937a1d
|
d3cea746432b1bfd1b5f2d38972c1b761b96e8eb | fetchroots.py | fetchroots.py | import os
import base64
from requests import Session, Request
from OpenSSL import crypto
#url = 'http://ct.googleapis.com/aviator/ct/v1/get-roots'
url = 'https://ct.api.venafi.com/ct/v1/get-roots'
s = Session()
r = Request('GET',
url)
prepped = r.prepare()
r = s.send(prepped)
if r.status_code == 200:
roots = r.json()
# RFC 6962 defines the certificate objects as base64 encoded certs.
# Importantly, these are not PEM formatted certs but base64 encoded
# ASN.1 (DER) encoded
for i in roots:
certs = roots[i]
for k in certs:
try:
certobj = crypto.load_certificate(crypto.FILETYPE_ASN1,base64.b64decode(k))
subject = certobj.get_subject()
print 'CN={},OU={},O={},L={},S={},C={}'.format(subject.commonName,
subject.organizationalUnitName,
subject.organizationName,
subject.localityName,
subject.stateOrProvinceName,
subject.countryName)
except:
print subject.get_components()
| import os
import base64
from requests import Session, Request
from OpenSSL import crypto
url = 'http://ct.googleapis.com/aviator/ct/v1/get-roots'
s = Session()
r = Request('GET',
url)
prepped = r.prepare()
r = s.send(prepped)
if r.status_code == 200:
roots = r.json()
# RFC 6962 defines the certificate objects as base64 encoded certs.
# Importantly, these are not PEM formatted certs but base64 encoded
# ASN.1 (DER) encoded
for i in roots:
certs = roots[i]
for k in certs:
try:
certobj = crypto.load_certificate(crypto.FILETYPE_ASN1,base64.b64decode(k))
subject = certobj.get_subject()
print 'CN={},OU={},O={},L={},S={},C={}'.format(subject.commonName,
subject.organizationalUnitName,
subject.organizationName,
subject.localityName,
subject.stateOrProvinceName,
subject.countryName)
except:
print subject.get_components()
| Update to use Google Aviator test log | Update to use Google Aviator test log
| Python | apache-2.0 | wgoulet/CTPyClient | python | ## Code Before:
import os
import base64
from requests import Session, Request
from OpenSSL import crypto
#url = 'http://ct.googleapis.com/aviator/ct/v1/get-roots'
url = 'https://ct.api.venafi.com/ct/v1/get-roots'
s = Session()
r = Request('GET',
url)
prepped = r.prepare()
r = s.send(prepped)
if r.status_code == 200:
roots = r.json()
# RFC 6962 defines the certificate objects as base64 encoded certs.
# Importantly, these are not PEM formatted certs but base64 encoded
# ASN.1 (DER) encoded
for i in roots:
certs = roots[i]
for k in certs:
try:
certobj = crypto.load_certificate(crypto.FILETYPE_ASN1,base64.b64decode(k))
subject = certobj.get_subject()
print 'CN={},OU={},O={},L={},S={},C={}'.format(subject.commonName,
subject.organizationalUnitName,
subject.organizationName,
subject.localityName,
subject.stateOrProvinceName,
subject.countryName)
except:
print subject.get_components()
## Instruction:
Update to use Google Aviator test log
## Code After:
import os
import base64
from requests import Session, Request
from OpenSSL import crypto
url = 'http://ct.googleapis.com/aviator/ct/v1/get-roots'
s = Session()
r = Request('GET',
url)
prepped = r.prepare()
r = s.send(prepped)
if r.status_code == 200:
roots = r.json()
# RFC 6962 defines the certificate objects as base64 encoded certs.
# Importantly, these are not PEM formatted certs but base64 encoded
# ASN.1 (DER) encoded
for i in roots:
certs = roots[i]
for k in certs:
try:
certobj = crypto.load_certificate(crypto.FILETYPE_ASN1,base64.b64decode(k))
subject = certobj.get_subject()
print 'CN={},OU={},O={},L={},S={},C={}'.format(subject.commonName,
subject.organizationalUnitName,
subject.organizationName,
subject.localityName,
subject.stateOrProvinceName,
subject.countryName)
except:
print subject.get_components()
|
272ff93a8593d09b477f562c4a81f36e104854ed | app/controllers/authentications_controller.rb | app/controllers/authentications_controller.rb | class AuthenticationsController < ApplicationController
before_action :only_admins
def create
if person = Person.authenticate(params[:authentication][:email], params[:authentication][:password])
render xml: person.to_xml(except: %w(salt encrypted_password feed_code api_key)), status: 201
elsif person.nil?
render plain: t('session.email_not_found'), status: 404
else
render plain: t('session.password_doesnt_match'), status: 401
end
end
private
def only_admins
unless @logged_in.super_admin?
render html: t('only_admins'), layout: true, status: 400
false
end
end
end
| class AuthenticationsController < ApplicationController
before_action :only_admins
def create
if person = Person.authenticate(params[:authentication][:email], params[:authentication][:password])
render xml: person.to_xml(except: %w(salt encrypted_password password_salt password_hash feed_code api_key)), status: 201
elsif person.nil?
render plain: t('session.email_not_found'), status: 404
else
render plain: t('session.password_doesnt_match'), status: 401
end
end
private
def only_admins
unless @logged_in.super_admin?
render html: t('only_admins'), layout: true, status: 400
false
end
end
end
| Exclude password fields from authentication api | Exclude password fields from authentication api | Ruby | agpl-3.0 | mattraykowski/onebody,hschin/onebody,mattraykowski/onebody,hschin/onebody,hschin/onebody,hschin/onebody,mattraykowski/onebody,mattraykowski/onebody | ruby | ## Code Before:
class AuthenticationsController < ApplicationController
before_action :only_admins
def create
if person = Person.authenticate(params[:authentication][:email], params[:authentication][:password])
render xml: person.to_xml(except: %w(salt encrypted_password feed_code api_key)), status: 201
elsif person.nil?
render plain: t('session.email_not_found'), status: 404
else
render plain: t('session.password_doesnt_match'), status: 401
end
end
private
def only_admins
unless @logged_in.super_admin?
render html: t('only_admins'), layout: true, status: 400
false
end
end
end
## Instruction:
Exclude password fields from authentication api
## Code After:
class AuthenticationsController < ApplicationController
before_action :only_admins
def create
if person = Person.authenticate(params[:authentication][:email], params[:authentication][:password])
render xml: person.to_xml(except: %w(salt encrypted_password password_salt password_hash feed_code api_key)), status: 201
elsif person.nil?
render plain: t('session.email_not_found'), status: 404
else
render plain: t('session.password_doesnt_match'), status: 401
end
end
private
def only_admins
unless @logged_in.super_admin?
render html: t('only_admins'), layout: true, status: 400
false
end
end
end
|
b36ad25030e409d9b8339f5e379346bbe7d764f1 | .cirrus.yml | .cirrus.yml | freebsd_instance:
image_family: freebsd-12-0
task:
prerequisites_script:
- sed -i '' 's/quarterly/latest/g' /etc/pkg/FreeBSD.conf
- pkg update -f
- pkg upgrade -y
- pkg install -y pkgconf vips libnghttp2 node npm
install_script:
- npm install --unsafe-perm
test_script:
- npm test
| freebsd_instance:
image_family: freebsd-13-0-snap
task:
prerequisites_script:
- pkg update -f
- pkg upgrade -y
- pkg install -y pkgconf vips node npm
install_script:
- npm install --unsafe-perm
test_script:
- npm test
| Upgrade FreeBSD to version 13 | CI: Upgrade FreeBSD to version 13
It currently provides only vips v8.8.3 so will (still) fail.
| YAML | apache-2.0 | lovell/sharp,lovell/sharp,lovell/sharp,lovell/sharp,lovell/sharp,lovell/sharp | yaml | ## Code Before:
freebsd_instance:
image_family: freebsd-12-0
task:
prerequisites_script:
- sed -i '' 's/quarterly/latest/g' /etc/pkg/FreeBSD.conf
- pkg update -f
- pkg upgrade -y
- pkg install -y pkgconf vips libnghttp2 node npm
install_script:
- npm install --unsafe-perm
test_script:
- npm test
## Instruction:
CI: Upgrade FreeBSD to version 13
It currently provides only vips v8.8.3 so will (still) fail.
## Code After:
freebsd_instance:
image_family: freebsd-13-0-snap
task:
prerequisites_script:
- pkg update -f
- pkg upgrade -y
- pkg install -y pkgconf vips node npm
install_script:
- npm install --unsafe-perm
test_script:
- npm test
|
66e1a824599c6c0d320ec7d49450e4ead087b4e3 | source/_posts/2014-06-30-gitlab-com-runs-ee.markdown | source/_posts/2014-06-30-gitlab-com-runs-ee.markdown | ---
layout: post
title: "GitLab.com now runs GitLab Enterprise Edition"
date: 2014-06-30 16:18:54 +0300
comments: true
categories:
author: Sytse Sijbrandij
---
[GitLab.com](https://about.gitlab.com/gitlab-com/) now runs [GitLab Enterprise Edition (EE)](https://about.gitlab.com/gitlab-ee/).
GitLab.com is our free SaaS offering allowing you to use GitLab with private repositories and unlimited collaborators.
You now get to use the most fully featured GitLab edition on GitLab.com.
<!--more-->
The reason for this is twofold.
First, we want to make it easy to allow you to experience all GitLab features.
Second, we want to run EE at scale ourselves since it contains more features than CE.
We want to be able to monitor all features on our own server for performance regressions or such things.
Monitor problems on our own servers is much easier since we have access to all graphs and logs.
This way we can solve problems before anybody notices them.
By the way, besides GitLab.com we also run another installation.
This is a server accessible only to GitLab B.V. employees because problems here are more noticeable.
This server is updated daily to the master branch of GitLab Community Edition.
This ensures we are immediately aware when someone merged a breaking change.
| ---
layout: post
title: "GitLab.com now runs GitLab Enterprise Edition"
date: 2014-06-30 16:18:54 +0300
comments: true
categories:
author: Sytse Sijbrandij
---
[GitLab.com](https://about.gitlab.com/gitlab-com/) now runs [GitLab Enterprise Edition (EE)](https://about.gitlab.com/gitlab-ee/).
GitLab.com is our free SaaS offering allowing you to use GitLab with private repositories and unlimited collaborators.
You now get to use the most fully featured GitLab edition on GitLab.com.
<!--more-->
The reason for this is twofold.
First, we want to make it easy to allow you to experience all GitLab features.
Second, we want to run EE at scale ourselves since it contains more features than CE.
We want to be able to monitor all features on our own server for things like performance regressions.
Monitor problems on our own servers is much easier since we have access to all graphs and logs.
This way we can solve problems before anybody notices them.
By the way, besides GitLab.com we also run another installation.
This is a server accessible only to GitLab B.V. employees because problems here are more noticeable.
This server is updated daily to the master branch of GitLab Community Edition.
This ensures we are immediately aware when someone merged a breaking change.
| Improve wording or blog post. | Improve wording or blog post.
| Markdown | mit | damianhakert/damianhakert.github.io | markdown | ## Code Before:
---
layout: post
title: "GitLab.com now runs GitLab Enterprise Edition"
date: 2014-06-30 16:18:54 +0300
comments: true
categories:
author: Sytse Sijbrandij
---
[GitLab.com](https://about.gitlab.com/gitlab-com/) now runs [GitLab Enterprise Edition (EE)](https://about.gitlab.com/gitlab-ee/).
GitLab.com is our free SaaS offering allowing you to use GitLab with private repositories and unlimited collaborators.
You now get to use the most fully featured GitLab edition on GitLab.com.
<!--more-->
The reason for this is twofold.
First, we want to make it easy to allow you to experience all GitLab features.
Second, we want to run EE at scale ourselves since it contains more features than CE.
We want to be able to monitor all features on our own server for performance regressions or such things.
Monitor problems on our own servers is much easier since we have access to all graphs and logs.
This way we can solve problems before anybody notices them.
By the way, besides GitLab.com we also run another installation.
This is a server accessible only to GitLab B.V. employees because problems here are more noticeable.
This server is updated daily to the master branch of GitLab Community Edition.
This ensures we are immediately aware when someone merged a breaking change.
## Instruction:
Improve wording or blog post.
## Code After:
---
layout: post
title: "GitLab.com now runs GitLab Enterprise Edition"
date: 2014-06-30 16:18:54 +0300
comments: true
categories:
author: Sytse Sijbrandij
---
[GitLab.com](https://about.gitlab.com/gitlab-com/) now runs [GitLab Enterprise Edition (EE)](https://about.gitlab.com/gitlab-ee/).
GitLab.com is our free SaaS offering allowing you to use GitLab with private repositories and unlimited collaborators.
You now get to use the most fully featured GitLab edition on GitLab.com.
<!--more-->
The reason for this is twofold.
First, we want to make it easy to allow you to experience all GitLab features.
Second, we want to run EE at scale ourselves since it contains more features than CE.
We want to be able to monitor all features on our own server for things like performance regressions.
Monitor problems on our own servers is much easier since we have access to all graphs and logs.
This way we can solve problems before anybody notices them.
By the way, besides GitLab.com we also run another installation.
This is a server accessible only to GitLab B.V. employees because problems here are more noticeable.
This server is updated daily to the master branch of GitLab Community Edition.
This ensures we are immediately aware when someone merged a breaking change.
|
2f8bed0896bbac0df1575340383b26f482010c20 | index.css | index.css | body {
margin:1em auto;
max-width:40em;
padding:0 .62em;
font:1.2em/1.62 Hack;
}
h1, h2, h3 {
line-height:1.2;
}
@media print {
body {
max-width:none
}
}
| body {
margin:1em auto;
max-width:40em;
padding:0 .62em;
font:1.2em/1.62 Hack;
linear-gradient(141deg, #0fb8ad 0%, #1fc8db 51%, #2cb5e8 75%)
}
h1, h2, h3 {
line-height:1.2;
border-bottom: 1px solid black;
}
h1 {
border-bottom: 2px solid black;
/* This overrides the 1px bottom border from previous section */
}
@media print {
body {
max-width:none
}
}
| Add background, add underline to headings. | Add background, add underline to headings. | CSS | bsd-2-clause | benrob0329/totallynotashadyweb.site | css | ## Code Before:
body {
margin:1em auto;
max-width:40em;
padding:0 .62em;
font:1.2em/1.62 Hack;
}
h1, h2, h3 {
line-height:1.2;
}
@media print {
body {
max-width:none
}
}
## Instruction:
Add background, add underline to headings.
## Code After:
body {
margin:1em auto;
max-width:40em;
padding:0 .62em;
font:1.2em/1.62 Hack;
linear-gradient(141deg, #0fb8ad 0%, #1fc8db 51%, #2cb5e8 75%)
}
h1, h2, h3 {
line-height:1.2;
border-bottom: 1px solid black;
}
h1 {
border-bottom: 2px solid black;
/* This overrides the 1px bottom border from previous section */
}
@media print {
body {
max-width:none
}
}
|
ee1ce653a3007e503aa6d85a32dafbe716e16406 | ext/lingua/extconf.rb | ext/lingua/extconf.rb | ENV['RC_ARCHS'] = '' if RUBY_PLATFORM =~ /darwin/
require "mkmf"
ROOT = File.expand_path(File.join(File.dirname(__FILE__), '..', '..'))
LIBSTEMMER = File.join(ROOT, 'libstemmer_c')
# build libstemmer_c
# FreeBSD make is gmake
make= (RUBY_PLATFORM =~ /freebsd/)? 'gmake' : 'make'
# MacOS architecture mess up
if RUBY_PLATFORM =~ /darwin/
begin
ENV['ARCHFLAGS']= "-arch " + %x[file #{File.expand_path(File.join(Config::CONFIG['bindir'], Config::CONFIG['RUBY_INSTALL_NAME']))}].strip!.match(/executable (.+)$/)[1] unless ENV['ARCHFLAGS'].nil?
rescue
$stderr << "Failed to get your ruby executable architecture.\n"
$stderr << "Please specify one using $ARCHFLAGS environment variable.\n"
exit
end
end
# make this stuff
system "cd #{LIBSTEMMER}; #{make} libstemmer.o; cd #{ROOT};"
exit unless $? == 0
$CFLAGS += " -I#{File.join(LIBSTEMMER, 'include')} "
$libs += " -L#{LIBSTEMMER} #{File.join(LIBSTEMMER, 'libstemmer.o')} "
if have_header("libstemmer.h")
create_makefile("lingua/stemmer_native")
end
| ENV['RC_ARCHS'] = '' if RUBY_PLATFORM =~ /darwin/
require "mkmf"
ROOT = File.expand_path(File.join(File.dirname(__FILE__), '..', '..'))
LIBSTEMMER = File.join(ROOT, 'libstemmer_c')
# build libstemmer_c
# FreeBSD make is gmake
make= (RUBY_PLATFORM =~ /freebsd/)? 'gmake' : 'make'
# MacOS architecture mess up
if RUBY_PLATFORM =~ /darwin/
begin
ENV['ARCHFLAGS']= "-arch " + %x[file #{File.expand_path(File.join(Config::CONFIG['bindir'], Config::CONFIG['RUBY_INSTALL_NAME']))}].strip!.match(/executable (.+)$/)[1] unless ENV['ARCHFLAGS'].nil?
rescue
$stderr << "Failed to get your ruby executable architecture.\n"
$stderr << "Please specify one using $ARCHFLAGS environment variable.\n"
exit
end
end
# make libstemmer_c. unless we're cross-compiling.
unless RUBY_PLATFORM =~ /i386-mingw32/
system "cd #{LIBSTEMMER}; #{make} libstemmer.o; cd #{ROOT};"
exit unless $? == 0
end
$CFLAGS += " -I#{File.expand_path(File.join(LIBSTEMMER, 'include'))} "
$libs += " -L#{LIBSTEMMER} #{File.expand_path(File.join(LIBSTEMMER, 'libstemmer.o'))} "
if have_header("libstemmer.h")
create_makefile("lingua/stemmer_native")
end
| Expand paths for compiler flags Don't build libstemmer.o on windows - should be build manually | Expand paths for compiler flags
Don't build libstemmer.o on windows - should be build manually
| Ruby | mit | aurelian/ruby-stemmer,aurelian/ruby-stemmer | ruby | ## Code Before:
ENV['RC_ARCHS'] = '' if RUBY_PLATFORM =~ /darwin/
require "mkmf"
ROOT = File.expand_path(File.join(File.dirname(__FILE__), '..', '..'))
LIBSTEMMER = File.join(ROOT, 'libstemmer_c')
# build libstemmer_c
# FreeBSD make is gmake
make= (RUBY_PLATFORM =~ /freebsd/)? 'gmake' : 'make'
# MacOS architecture mess up
if RUBY_PLATFORM =~ /darwin/
begin
ENV['ARCHFLAGS']= "-arch " + %x[file #{File.expand_path(File.join(Config::CONFIG['bindir'], Config::CONFIG['RUBY_INSTALL_NAME']))}].strip!.match(/executable (.+)$/)[1] unless ENV['ARCHFLAGS'].nil?
rescue
$stderr << "Failed to get your ruby executable architecture.\n"
$stderr << "Please specify one using $ARCHFLAGS environment variable.\n"
exit
end
end
# make this stuff
system "cd #{LIBSTEMMER}; #{make} libstemmer.o; cd #{ROOT};"
exit unless $? == 0
$CFLAGS += " -I#{File.join(LIBSTEMMER, 'include')} "
$libs += " -L#{LIBSTEMMER} #{File.join(LIBSTEMMER, 'libstemmer.o')} "
if have_header("libstemmer.h")
create_makefile("lingua/stemmer_native")
end
## Instruction:
Expand paths for compiler flags
Don't build libstemmer.o on windows - should be build manually
## Code After:
ENV['RC_ARCHS'] = '' if RUBY_PLATFORM =~ /darwin/
require "mkmf"
ROOT = File.expand_path(File.join(File.dirname(__FILE__), '..', '..'))
LIBSTEMMER = File.join(ROOT, 'libstemmer_c')
# build libstemmer_c
# FreeBSD make is gmake
make= (RUBY_PLATFORM =~ /freebsd/)? 'gmake' : 'make'
# MacOS architecture mess up
if RUBY_PLATFORM =~ /darwin/
begin
ENV['ARCHFLAGS']= "-arch " + %x[file #{File.expand_path(File.join(Config::CONFIG['bindir'], Config::CONFIG['RUBY_INSTALL_NAME']))}].strip!.match(/executable (.+)$/)[1] unless ENV['ARCHFLAGS'].nil?
rescue
$stderr << "Failed to get your ruby executable architecture.\n"
$stderr << "Please specify one using $ARCHFLAGS environment variable.\n"
exit
end
end
# make libstemmer_c. unless we're cross-compiling.
unless RUBY_PLATFORM =~ /i386-mingw32/
system "cd #{LIBSTEMMER}; #{make} libstemmer.o; cd #{ROOT};"
exit unless $? == 0
end
$CFLAGS += " -I#{File.expand_path(File.join(LIBSTEMMER, 'include'))} "
$libs += " -L#{LIBSTEMMER} #{File.expand_path(File.join(LIBSTEMMER, 'libstemmer.o'))} "
if have_header("libstemmer.h")
create_makefile("lingua/stemmer_native")
end
|
17b597513cb8c7aab777cd1ea5fe7228cd15c166 | pogo-protos.js | pogo-protos.js | const fs = require('fs'),
protobuf = require('protobufjs');
const builder = protobuf.newBuilder(),
protoDir = __dirname + '/proto';
fs.readdirSync(protoDir)
.filter(filename => filename.match(/\.proto$/))
.forEach(filename => protobuf.loadProtoFile(protoDir + '/' + filename, builder));
module.exports = builder.build("POGOProtos"); | const fs = require('fs'),
protobuf = require('protobufjs');
const builder = protobuf.newBuilder(),
protoDir = __dirname + '/proto';
fs.readdirSync(protoDir)
.filter(filename => filename.match(/\.proto$/))
.forEach(filename => protobuf.loadProtoFile(protoDir + '/' + filename, builder));
// Recursively add the packed=true to all packable repeated fields.
// Repeated fields are packed by default in proto3 but protobuf.js incorrectly does not set the option.
// See also: https://github.com/dcodeIO/protobuf.js/issues/432
function addPackedOption(ns) {
if (ns instanceof protobuf.Reflect.Message) {
ns.getChildren(protobuf.Reflect.Field).forEach(field => {
if (field.repeated && protobuf.PACKABLE_WIRE_TYPES.indexOf(field.type.wireType) != -1) {
field.options.packed = true;
}
});
} else if (ns instanceof protobuf.Reflect.Namespace) {
ns.children.forEach(addPackedOption);
}
}
addPackedOption(builder.lookup('POGOProtos'));
module.exports = builder.build("POGOProtos"); | Set packed=true for all repeated packable types | Set packed=true for all repeated packable types
Workaround for https://github.com/dcodeIO/protobuf.js/issues/432
| JavaScript | mit | cyraxx/node-pogo-protos,cyraxx/node-pogo-protos | javascript | ## Code Before:
const fs = require('fs'),
protobuf = require('protobufjs');
const builder = protobuf.newBuilder(),
protoDir = __dirname + '/proto';
fs.readdirSync(protoDir)
.filter(filename => filename.match(/\.proto$/))
.forEach(filename => protobuf.loadProtoFile(protoDir + '/' + filename, builder));
module.exports = builder.build("POGOProtos");
## Instruction:
Set packed=true for all repeated packable types
Workaround for https://github.com/dcodeIO/protobuf.js/issues/432
## Code After:
const fs = require('fs'),
protobuf = require('protobufjs');
const builder = protobuf.newBuilder(),
protoDir = __dirname + '/proto';
fs.readdirSync(protoDir)
.filter(filename => filename.match(/\.proto$/))
.forEach(filename => protobuf.loadProtoFile(protoDir + '/' + filename, builder));
// Recursively add the packed=true to all packable repeated fields.
// Repeated fields are packed by default in proto3 but protobuf.js incorrectly does not set the option.
// See also: https://github.com/dcodeIO/protobuf.js/issues/432
function addPackedOption(ns) {
if (ns instanceof protobuf.Reflect.Message) {
ns.getChildren(protobuf.Reflect.Field).forEach(field => {
if (field.repeated && protobuf.PACKABLE_WIRE_TYPES.indexOf(field.type.wireType) != -1) {
field.options.packed = true;
}
});
} else if (ns instanceof protobuf.Reflect.Namespace) {
ns.children.forEach(addPackedOption);
}
}
addPackedOption(builder.lookup('POGOProtos'));
module.exports = builder.build("POGOProtos"); |
18303dd3b3f89874a225aabedb26df5f5b5fa237 | tests/main.cpp | tests/main.cpp |
static void error_callback(int error, const char* description)
{
FAIL() << "GLFW Failed (" << error << "): " << description;
}
TEST(GLFWTests, BuildInvisibleWindow)
{
GLFWwindow* window;
glfwSetErrorCallback(error_callback);
if (!glfwInit())
FAIL() << "GLFW failed to initialize";
glfwWindowHint(GLFW_VISIBLE, 0);
window = glfwCreateWindow(640, 480, "Invisible Example", NULL, NULL);
if (!window)
{
glfwTerminate();
FAIL() << "Failed to create glfw window";
}
glfwDestroyWindow(window);
glfwTerminate();
}
int main(int argc, char** argv)
{
::testing::InitGoogleTest(&argc, argv);
return RUN_ALL_TESTS();
}
|
static void error_callback(int error, const char* description)
{
FAIL() << "GLFW Failed (" << error << "): " << description;
}
TEST(GLFWTests, BuildInvisibleWindow)
{
GLFWwindow* window;
glfwSetErrorCallback(error_callback);
if (!glfwInit())
FAIL() << "GLFW failed to initialize";
//glfwWindowHint(GLFW_VISIBLE, 0);
//window = glfwCreateWindow(640, 480, "Invisible Example", NULL, NULL);
//if (!window)
//{
// glfwTerminate();
// FAIL() << "Failed to create glfw window";
//}
//glfwDestroyWindow(window);
glfwTerminate();
}
int main(int argc, char** argv)
{
::testing::InitGoogleTest(&argc, argv);
return RUN_ALL_TESTS();
}
| Remove window display. There is no X server on travis. | Remove window display. There is no X server on travis.
| C++ | mit | iauns/cpm-glfw,iauns/cpm-glfw | c++ | ## Code Before:
static void error_callback(int error, const char* description)
{
FAIL() << "GLFW Failed (" << error << "): " << description;
}
TEST(GLFWTests, BuildInvisibleWindow)
{
GLFWwindow* window;
glfwSetErrorCallback(error_callback);
if (!glfwInit())
FAIL() << "GLFW failed to initialize";
glfwWindowHint(GLFW_VISIBLE, 0);
window = glfwCreateWindow(640, 480, "Invisible Example", NULL, NULL);
if (!window)
{
glfwTerminate();
FAIL() << "Failed to create glfw window";
}
glfwDestroyWindow(window);
glfwTerminate();
}
int main(int argc, char** argv)
{
::testing::InitGoogleTest(&argc, argv);
return RUN_ALL_TESTS();
}
## Instruction:
Remove window display. There is no X server on travis.
## Code After:
static void error_callback(int error, const char* description)
{
FAIL() << "GLFW Failed (" << error << "): " << description;
}
TEST(GLFWTests, BuildInvisibleWindow)
{
GLFWwindow* window;
glfwSetErrorCallback(error_callback);
if (!glfwInit())
FAIL() << "GLFW failed to initialize";
//glfwWindowHint(GLFW_VISIBLE, 0);
//window = glfwCreateWindow(640, 480, "Invisible Example", NULL, NULL);
//if (!window)
//{
// glfwTerminate();
// FAIL() << "Failed to create glfw window";
//}
//glfwDestroyWindow(window);
glfwTerminate();
}
int main(int argc, char** argv)
{
::testing::InitGoogleTest(&argc, argv);
return RUN_ALL_TESTS();
}
|
75fd5af85c4c4bea520215f356f1db894632479e | src/parseQueryDocument.js | src/parseQueryDocument.js | import {
parse,
visit,
visitWithTypeInfo,
TypeInfo
} from 'graphql';
export default function parseQueryDocument(queryDocument, schema) {
const typeInfo = new TypeInfo(schema);
const ast = parse(queryDocument);
function source(location) {
return queryDocument.slice(location.start, location.end);
}
return visit(ast, visitWithTypeInfo(typeInfo, {
leave: {
Name: node => node.value,
Document: node => node.definitions,
OperationDefinition: ({ loc, name, operation, variableDefinitions, selectionSet }) => {
return { name, operation, source: source(loc), variableDefinitions, selectionSet };
},
VariableDefinition: node => {
const type = typeInfo.getInputType();
return { name: node.variable, type: type };
},
Variable: node => node.name,
SelectionSet: ({ selections }) => selections,
Field: ({ kind, alias, name, arguments: args, directives, selectionSet }) => {
const type = typeInfo.getType();
return { kind, alias, name, type: type, selectionSet: selectionSet ? selectionSet : undefined }
}
}
}));
}
| import {
parse,
validate,
visit,
visitWithTypeInfo,
TypeInfo
} from 'graphql';
export default function parseQueryDocument(queryDocument, schema) {
const ast = parse(queryDocument);
const validationErrors = validate(schema, ast);
if (validationErrors && validationErrors.length > 0) {
for (const error of validationErrors) {
const location = error.locations[0];
console.log(`graphql:${location.line}: error: ${error.message}`);
}
throw Error("Validation of GraphQL query document failed");
}
const typeInfo = new TypeInfo(schema);
function source(location) {
return queryDocument.slice(location.start, location.end);
}
return visit(ast, visitWithTypeInfo(typeInfo, {
leave: {
Name: node => node.value,
Document: node => node.definitions,
OperationDefinition: ({ loc, name, operation, variableDefinitions, selectionSet }) => {
return { name, operation, source: source(loc), variableDefinitions, selectionSet };
},
VariableDefinition: node => {
const type = typeInfo.getInputType();
return { name: node.variable, type: type };
},
Variable: node => node.name,
SelectionSet: ({ selections }) => selections,
Field: ({ kind, alias, name, arguments: args, directives, selectionSet }) => {
const type = typeInfo.getType();
return { kind, alias, name, type: type, selectionSet: selectionSet ? selectionSet : undefined }
}
}
}));
}
| Validate query document and output Xcode compatible errors | Validate query document and output Xcode compatible errors
| JavaScript | mit | itajaja/apollo-codegen,KieranLafferty/apollo-codegen,KieranLafferty/apollo-codegen | javascript | ## Code Before:
import {
parse,
visit,
visitWithTypeInfo,
TypeInfo
} from 'graphql';
export default function parseQueryDocument(queryDocument, schema) {
const typeInfo = new TypeInfo(schema);
const ast = parse(queryDocument);
function source(location) {
return queryDocument.slice(location.start, location.end);
}
return visit(ast, visitWithTypeInfo(typeInfo, {
leave: {
Name: node => node.value,
Document: node => node.definitions,
OperationDefinition: ({ loc, name, operation, variableDefinitions, selectionSet }) => {
return { name, operation, source: source(loc), variableDefinitions, selectionSet };
},
VariableDefinition: node => {
const type = typeInfo.getInputType();
return { name: node.variable, type: type };
},
Variable: node => node.name,
SelectionSet: ({ selections }) => selections,
Field: ({ kind, alias, name, arguments: args, directives, selectionSet }) => {
const type = typeInfo.getType();
return { kind, alias, name, type: type, selectionSet: selectionSet ? selectionSet : undefined }
}
}
}));
}
## Instruction:
Validate query document and output Xcode compatible errors
## Code After:
import {
parse,
validate,
visit,
visitWithTypeInfo,
TypeInfo
} from 'graphql';
// Parses and validates a GraphQL query document against the given schema,
// then lowers the AST into a simplified, type-annotated tree.
//
// queryDocument: raw GraphQL source text (string).
// schema: the GraphQLSchema used for validation and for type lookups.
// Returns: the value produced by the Document leave handler below (the
// array of simplified operation definitions).
// Throws: parse() raises on syntax errors; on validation errors, diagnostics
// are logged in Xcode's "file:line: error: message" format and an Error is
// thrown.
export default function parseQueryDocument(queryDocument, schema) {
const ast = parse(queryDocument);
const validationErrors = validate(schema, ast);
if (validationErrors && validationErrors.length > 0) {
for (const error of validationErrors) {
// Only the first reported location is surfaced per error.
// NOTE(review): assumes every validation error carries a non-empty
// `locations` array — confirm against graphql-js behavior.
const location = error.locations[0];
console.log(`graphql:${location.line}: error: ${error.message}`);
}
throw Error("Validation of GraphQL query document failed");
}
// TypeInfo tracks the current type context while the visitor walks the AST;
// the getInputType()/getType() calls in the leave handlers below depend on
// visitWithTypeInfo keeping it in sync, so traversal order matters here.
const typeInfo = new TypeInfo(schema);
// Slices the original query text covered by an AST location, preserving the
// operation's exact source formatting.
function source(location) {
return queryDocument.slice(location.start, location.end);
}
return visit(ast, visitWithTypeInfo(typeInfo, {
leave: {
// Collapse Name nodes to their bare string value.
Name: node => node.value,
// A Document becomes just its list of definitions.
Document: node => node.definitions,
OperationDefinition: ({ loc, name, operation, variableDefinitions, selectionSet }) => {
return { name, operation, source: source(loc), variableDefinitions, selectionSet };
},
VariableDefinition: node => {
// getInputType() reflects the variable's declared input type at this
// point in the traversal.
const type = typeInfo.getInputType();
return { name: node.variable, type: type };
},
Variable: node => node.name,
SelectionSet: ({ selections }) => selections,
Field: ({ kind, alias, name, arguments: args, directives, selectionSet }) => {
// NOTE(review): `args` and `directives` are destructured but omitted
// from the result — confirm whether dropping them is intentional.
const type = typeInfo.getType();
return { kind, alias, name, type: type, selectionSet: selectionSet ? selectionSet : undefined }
}
}
}));
}
|
f546a7657a801540c47fe8a872fc91a3fa97afdb | Casks/macpaw-gemini.rb | Casks/macpaw-gemini.rb | cask :v1 => 'macpaw-gemini' do
version :latest
sha256 :no_check
# devmate.com is the official download host per the vendor homepage
url 'http://dl.devmate.com/download/com.macpaw.site.Gemini/macpaw%20gemini.dmg'
appcast 'http://updates.devmate.com/com.macpaw.site.Gemini.xml'
homepage 'http://macpaw.com/gemini'
license :unknown # todo: change license and remove this comment; ':unknown' is a machine-generated placeholder
app 'MacPaw Gemini.app'
end
| cask :v1 => 'macpaw-gemini' do
version :latest
sha256 :no_check
# devmate.com is the official download host per the vendor homepage
url 'http://dl.devmate.com/com.macpaw.site.Gemini/MacPawGemini.dmg'
appcast 'http://updates.devmate.com/com.macpaw.site.Gemini.xml'
homepage 'http://macpaw.com/gemini'
license :commercial
app 'MacPaw Gemini.app'
end
| Update url and license for MacPaw Gemini.app | Update url and license for MacPaw Gemini.app
| Ruby | bsd-2-clause | fkrone/homebrew-cask,fharbe/homebrew-cask,kuno/homebrew-cask,paulbreslin/homebrew-cask,bsiddiqui/homebrew-cask,amatos/homebrew-cask,jpodlech/homebrew-cask,gyndav/homebrew-cask,vigosan/homebrew-cask,gerrypower/homebrew-cask,fazo96/homebrew-cask,diogodamiani/homebrew-cask,ericbn/homebrew-cask,squid314/homebrew-cask,Ibuprofen/homebrew-cask,koenrh/homebrew-cask,goxberry/homebrew-cask,Saklad5/homebrew-cask,epardee/homebrew-cask,katoquro/homebrew-cask,gguillotte/homebrew-cask,jedahan/homebrew-cask,JoelLarson/homebrew-cask,m3nu/homebrew-cask,neil-ca-moore/homebrew-cask,lcasey001/homebrew-cask,skatsuta/homebrew-cask,joschi/homebrew-cask,a-x-/homebrew-cask,linc01n/homebrew-cask,winkelsdorf/homebrew-cask,moimikey/homebrew-cask,chadcatlett/caskroom-homebrew-cask,devmynd/homebrew-cask,huanzhang/homebrew-cask,segiddins/homebrew-cask,a1russell/homebrew-cask,joaocc/homebrew-cask,giannitm/homebrew-cask,nathansgreen/homebrew-cask,genewoo/homebrew-cask,exherb/homebrew-cask,scw/homebrew-cask,jhowtan/homebrew-cask,tjt263/homebrew-cask,rhendric/homebrew-cask,uetchy/homebrew-cask,JikkuJose/homebrew-cask,tan9/homebrew-cask,coeligena/homebrew-customized,imgarylai/homebrew-cask,julienlavergne/homebrew-cask,illusionfield/homebrew-cask,xight/homebrew-cask,franklouwers/homebrew-cask,ohammersmith/homebrew-cask,kryhear/homebrew-cask,jalaziz/homebrew-cask,moonboots/homebrew-cask,MicTech/homebrew-cask,3van/homebrew-cask,MerelyAPseudonym/homebrew-cask,renaudguerin/homebrew-cask,segiddins/homebrew-cask,andersonba/homebrew-cask,Fedalto/homebrew-cask,codeurge/homebrew-cask,boydj/homebrew-cask,pacav69/homebrew-cask,jacobdam/homebrew-cask,JikkuJose/homebrew-cask,opsdev-ws/homebrew-cask,sebcode/homebrew-cask,chrisRidgers/homebrew-cask,tolbkni/homebrew-cask,larseggert/homebrew-cask,mathbunnyru/homebrew-cask,pinut/homebrew-cask,jamesmlees/homebrew-cask,moogar0880/homebrew-cask,SentinelWarren/homebrew-cask,jalaziz/homebrew-cask,jgarber623/homebrew-cask,elseym/homebrew-cask,lumaxis/ho
mebrew-cask,joschi/homebrew-cask,My2ndAngelic/homebrew-cask,vigosan/homebrew-cask,BenjaminHCCarr/homebrew-cask,jgarber623/homebrew-cask,m3nu/homebrew-cask,guylabs/homebrew-cask,schneidmaster/homebrew-cask,catap/homebrew-cask,asbachb/homebrew-cask,riyad/homebrew-cask,feigaochn/homebrew-cask,atsuyim/homebrew-cask,ericbn/homebrew-cask,bkono/homebrew-cask,johntrandall/homebrew-cask,wKovacs64/homebrew-cask,nysthee/homebrew-cask,astorije/homebrew-cask,paour/homebrew-cask,Whoaa512/homebrew-cask,elnappo/homebrew-cask,sirodoht/homebrew-cask,epardee/homebrew-cask,MircoT/homebrew-cask,zorosteven/homebrew-cask,hackhandslabs/homebrew-cask,n8henrie/homebrew-cask,janlugt/homebrew-cask,wastrachan/homebrew-cask,okket/homebrew-cask,tedski/homebrew-cask,sjackman/homebrew-cask,yutarody/homebrew-cask,squid314/homebrew-cask,franklouwers/homebrew-cask,aki77/homebrew-cask,cblecker/homebrew-cask,nrlquaker/homebrew-cask,hovancik/homebrew-cask,joschi/homebrew-cask,malford/homebrew-cask,pinut/homebrew-cask,drostron/homebrew-cask,supriyantomaftuh/homebrew-cask,decrement/homebrew-cask,githubutilities/homebrew-cask,vuquoctuan/homebrew-cask,KosherBacon/homebrew-cask,ahundt/homebrew-cask,dwkns/homebrew-cask,boecko/homebrew-cask,sirodoht/homebrew-cask,Amorymeltzer/homebrew-cask,josa42/homebrew-cask,nickpellant/homebrew-cask,malob/homebrew-cask,lieuwex/homebrew-cask,alloy/homebrew-cask,rubenerd/homebrew-cask,jangalinski/homebrew-cask,d/homebrew-cask,esebastian/homebrew-cask,hanxue/caskroom,arronmabrey/homebrew-cask,aguynamedryan/homebrew-cask,opsdev-ws/homebrew-cask,shoichiaizawa/homebrew-cask,farmerchris/homebrew-cask,MoOx/homebrew-cask,jacobbednarz/homebrew-cask,sanchezm/homebrew-cask,robbiethegeek/homebrew-cask,otaran/homebrew-cask,taherio/homebrew-cask,BahtiyarB/homebrew-cask,Dremora/homebrew-cask,inta/homebrew-cask,ddm/homebrew-cask,rhendric/homebrew-cask,hristozov/homebrew-cask,MircoT/homebrew-cask,julionc/homebrew-cask,a1russell/homebrew-cask,tedbundyjr/homebrew-cask,bendoerr/homebrew-cask,fla
viocamilo/homebrew-cask,dustinblackman/homebrew-cask,qnm/homebrew-cask,hellosky806/homebrew-cask,miku/homebrew-cask,LaurentFough/homebrew-cask,kievechua/homebrew-cask,garborg/homebrew-cask,kesara/homebrew-cask,paour/homebrew-cask,psibre/homebrew-cask,okket/homebrew-cask,lumaxis/homebrew-cask,qnm/homebrew-cask,bsiddiqui/homebrew-cask,adrianchia/homebrew-cask,diogodamiani/homebrew-cask,koenrh/homebrew-cask,zhuzihhhh/homebrew-cask,jmeridth/homebrew-cask,Hywan/homebrew-cask,yuhki50/homebrew-cask,joaocc/homebrew-cask,robertgzr/homebrew-cask,RJHsiao/homebrew-cask,My2ndAngelic/homebrew-cask,kirikiriyamama/homebrew-cask,dictcp/homebrew-cask,cblecker/homebrew-cask,hvisage/homebrew-cask,zhuzihhhh/homebrew-cask,royalwang/homebrew-cask,buo/homebrew-cask,mjgardner/homebrew-cask,yurikoles/homebrew-cask,lucasmezencio/homebrew-cask,mlocher/homebrew-cask,kamilboratynski/homebrew-cask,tmoreira2020/homebrew,cblecker/homebrew-cask,delphinus35/homebrew-cask,seanorama/homebrew-cask,skyyuan/homebrew-cask,theoriginalgri/homebrew-cask,yurrriq/homebrew-cask,markhuber/homebrew-cask,winkelsdorf/homebrew-cask,xyb/homebrew-cask,toonetown/homebrew-cask,bchatard/homebrew-cask,jen20/homebrew-cask,mwek/homebrew-cask,timsutton/homebrew-cask,nrlquaker/homebrew-cask,forevergenin/homebrew-cask,optikfluffel/homebrew-cask,zeusdeux/homebrew-cask,Dremora/homebrew-cask,retbrown/homebrew-cask,athrunsun/homebrew-cask,jaredsampson/homebrew-cask,seanzxx/homebrew-cask,blogabe/homebrew-cask,stevenmaguire/homebrew-cask,MisumiRize/homebrew-cask,Nitecon/homebrew-cask,jonathanwiesel/homebrew-cask,miccal/homebrew-cask,howie/homebrew-cask,mlocher/homebrew-cask,bosr/homebrew-cask,kteru/homebrew-cask,sgnh/homebrew-cask,alloy/homebrew-cask,coneman/homebrew-cask,crmne/homebrew-cask,jamesmlees/homebrew-cask,cfillion/homebrew-cask,mchlrmrz/homebrew-cask,dvdoliveira/homebrew-cask,syscrusher/homebrew-cask,flaviocamilo/homebrew-cask,jrwesolo/homebrew-cask,jellyfishcoder/homebrew-cask,rogeriopradoj/homebrew-cask,claui/homebrew-ca
sk,jpodlech/homebrew-cask,Ketouem/homebrew-cask,jayshao/homebrew-cask,kuno/homebrew-cask,kei-yamazaki/homebrew-cask,wKovacs64/homebrew-cask,ninjahoahong/homebrew-cask,jpmat296/homebrew-cask,tangestani/homebrew-cask,MerelyAPseudonym/homebrew-cask,muan/homebrew-cask,FinalDes/homebrew-cask,theoriginalgri/homebrew-cask,LaurentFough/homebrew-cask,mathbunnyru/homebrew-cask,xtian/homebrew-cask,tsparber/homebrew-cask,renaudguerin/homebrew-cask,andersonba/homebrew-cask,kpearson/homebrew-cask,MichaelPei/homebrew-cask,ahvigil/homebrew-cask,samnung/homebrew-cask,sanyer/homebrew-cask,kiliankoe/homebrew-cask,victorpopkov/homebrew-cask,hackhandslabs/homebrew-cask,alexg0/homebrew-cask,julionc/homebrew-cask,guylabs/homebrew-cask,perfide/homebrew-cask,rkJun/homebrew-cask,enriclluelles/homebrew-cask,mindriot101/homebrew-cask,Ephemera/homebrew-cask,sparrc/homebrew-cask,gwaldo/homebrew-cask,mahori/homebrew-cask,shanonvl/homebrew-cask,caskroom/homebrew-cask,bdhess/homebrew-cask,vitorgalvao/homebrew-cask,otaran/homebrew-cask,miccal/homebrew-cask,moimikey/homebrew-cask,RickWong/homebrew-cask,m3nu/homebrew-cask,miku/homebrew-cask,retbrown/homebrew-cask,adrianchia/homebrew-cask,githubutilities/homebrew-cask,kolomiichenko/homebrew-cask,skatsuta/homebrew-cask,colindunn/homebrew-cask,xyb/homebrew-cask,scottsuch/homebrew-cask,aktau/homebrew-cask,jawshooah/homebrew-cask,zerrot/homebrew-cask,hyuna917/homebrew-cask,danielbayley/homebrew-cask,jacobbednarz/homebrew-cask,janlugt/homebrew-cask,iamso/homebrew-cask,enriclluelles/homebrew-cask,rkJun/homebrew-cask,kongslund/homebrew-cask,dustinblackman/homebrew-cask,samdoran/homebrew-cask,andrewdisley/homebrew-cask,xight/homebrew-cask,rickychilcott/homebrew-cask,moonboots/homebrew-cask,neil-ca-moore/homebrew-cask,sscotth/homebrew-cask,troyxmccall/homebrew-cask,dcondrey/homebrew-cask,jawshooah/homebrew-cask,ksato9700/homebrew-cask,af/homebrew-cask,dieterdemeyer/homebrew-cask,chadcatlett/caskroom-homebrew-cask,drostron/homebrew-cask,jacobdam/homebrew-cask,ma
hori/homebrew-cask,ddm/homebrew-cask,katoquro/homebrew-cask,mishari/homebrew-cask,gerrymiller/homebrew-cask,guerrero/homebrew-cask,nicolas-brousse/homebrew-cask,jeroenseegers/homebrew-cask,yumitsu/homebrew-cask,johnjelinek/homebrew-cask,tan9/homebrew-cask,nathanielvarona/homebrew-cask,cliffcotino/homebrew-cask,xight/homebrew-cask,josa42/homebrew-cask,kassi/homebrew-cask,nightscape/homebrew-cask,kesara/homebrew-cask,stigkj/homebrew-caskroom-cask,Ketouem/homebrew-cask,JosephViolago/homebrew-cask,jalaziz/homebrew-cask,corbt/homebrew-cask,imgarylai/homebrew-cask,miccal/homebrew-cask,6uclz1/homebrew-cask,michelegera/homebrew-cask,andyli/homebrew-cask,sysbot/homebrew-cask,jtriley/homebrew-cask,andyli/homebrew-cask,13k/homebrew-cask,larseggert/homebrew-cask,remko/homebrew-cask,lolgear/homebrew-cask,ahundt/homebrew-cask,kronicd/homebrew-cask,lieuwex/homebrew-cask,christer155/homebrew-cask,victorpopkov/homebrew-cask,jeroenseegers/homebrew-cask,JoelLarson/homebrew-cask,shorshe/homebrew-cask,slack4u/homebrew-cask,nickpellant/homebrew-cask,qbmiller/homebrew-cask,ywfwj2008/homebrew-cask,ianyh/homebrew-cask,CameronGarrett/homebrew-cask,sanyer/homebrew-cask,dspeckhard/homebrew-cask,genewoo/homebrew-cask,sohtsuka/homebrew-cask,artdevjs/homebrew-cask,mkozjak/homebrew-cask,helloIAmPau/homebrew-cask,kingthorin/homebrew-cask,FinalDes/homebrew-cask,troyxmccall/homebrew-cask,hyuna917/homebrew-cask,phpwutz/homebrew-cask,leonmachadowilcox/homebrew-cask,iAmGhost/homebrew-cask,johndbritton/homebrew-cask,boydj/homebrew-cask,flada-auxv/homebrew-cask,kteru/homebrew-cask,mahori/homebrew-cask,Amorymeltzer/homebrew-cask,chrisfinazzo/homebrew-cask,moogar0880/homebrew-cask,bgandon/homebrew-cask,dwihn0r/homebrew-cask,chino/homebrew-cask,stephenwade/homebrew-cask,xyb/homebrew-cask,albertico/homebrew-cask,phpwutz/homebrew-cask,jgarber623/homebrew-cask,feniix/homebrew-cask,exherb/homebrew-cask,sscotth/homebrew-cask,qbmiller/homebrew-cask,linc01n/homebrew-cask,renard/homebrew-cask,epmatsw/homebrew-cask,k
ei-yamazaki/homebrew-cask,aguynamedryan/homebrew-cask,shoichiaizawa/homebrew-cask,mazehall/homebrew-cask,ksato9700/homebrew-cask,pacav69/homebrew-cask,bric3/homebrew-cask,patresi/homebrew-cask,jasmas/homebrew-cask,vin047/homebrew-cask,mwean/homebrew-cask,yutarody/homebrew-cask,scottsuch/homebrew-cask,lolgear/homebrew-cask,mkozjak/homebrew-cask,wayou/homebrew-cask,sysbot/homebrew-cask,doits/homebrew-cask,esebastian/homebrew-cask,pablote/homebrew-cask,xiongchiamiov/homebrew-cask,stephenwade/homebrew-cask,yuhki50/homebrew-cask,hellosky806/homebrew-cask,mattrobenolt/homebrew-cask,BenjaminHCCarr/homebrew-cask,santoshsahoo/homebrew-cask,gmkey/homebrew-cask,jaredsampson/homebrew-cask,n0ts/homebrew-cask,gabrielizaias/homebrew-cask,muan/homebrew-cask,stevehedrick/homebrew-cask,ptb/homebrew-cask,slnovak/homebrew-cask,lvicentesanchez/homebrew-cask,coeligena/homebrew-customized,chrisfinazzo/homebrew-cask,lifepillar/homebrew-cask,tolbkni/homebrew-cask,amatos/homebrew-cask,sohtsuka/homebrew-cask,spruceb/homebrew-cask,englishm/homebrew-cask,axodys/homebrew-cask,stonehippo/homebrew-cask,0rax/homebrew-cask,wolflee/homebrew-cask,mjdescy/homebrew-cask,kpearson/homebrew-cask,fkrone/homebrew-cask,dunn/homebrew-cask,AnastasiaSulyagina/homebrew-cask,dunn/homebrew-cask,JosephViolago/homebrew-cask,diguage/homebrew-cask,nivanchikov/homebrew-cask,bkono/homebrew-cask,mattrobenolt/homebrew-cask,iAmGhost/homebrew-cask,shonjir/homebrew-cask,corbt/homebrew-cask,vmrob/homebrew-cask,Fedalto/homebrew-cask,yurrriq/homebrew-cask,deanmorin/homebrew-cask,hristozov/homebrew-cask,hovancik/homebrew-cask,goxberry/homebrew-cask,elyscape/homebrew-cask,ctrevino/homebrew-cask,thomanq/homebrew-cask,cohei/homebrew-cask,mwean/homebrew-cask,vmrob/homebrew-cask,patresi/homebrew-cask,blogabe/homebrew-cask,tarwich/homebrew-cask,a-x-/homebrew-cask,leonmachadowilcox/homebrew-cask,faun/homebrew-cask,morganestes/homebrew-cask,chuanxd/homebrew-cask,mauricerkelly/homebrew-cask,catap/homebrew-cask,zchee/homebrew-cask,bendoerr
/homebrew-cask,elseym/homebrew-cask,crzrcn/homebrew-cask,shishi/homebrew-cask,jmeridth/homebrew-cask,atsuyim/homebrew-cask,greg5green/homebrew-cask,brianshumate/homebrew-cask,ch3n2k/homebrew-cask,kingthorin/homebrew-cask,alexg0/homebrew-cask,haha1903/homebrew-cask,jconley/homebrew-cask,lauantai/homebrew-cask,scottsuch/homebrew-cask,elyscape/homebrew-cask,rickychilcott/homebrew-cask,spruceb/homebrew-cask,xcezx/homebrew-cask,jen20/homebrew-cask,daften/homebrew-cask,ldong/homebrew-cask,rajiv/homebrew-cask,winkelsdorf/homebrew-cask,frapposelli/homebrew-cask,Labutin/homebrew-cask,napaxton/homebrew-cask,shoichiaizawa/homebrew-cask,malford/homebrew-cask,leipert/homebrew-cask,0xadada/homebrew-cask,alebcay/homebrew-cask,mchlrmrz/homebrew-cask,af/homebrew-cask,ajbw/homebrew-cask,FranklinChen/homebrew-cask,ayohrling/homebrew-cask,skyyuan/homebrew-cask,mgryszko/homebrew-cask,kiliankoe/homebrew-cask,joshka/homebrew-cask,rcuza/homebrew-cask,adelinofaria/homebrew-cask,miguelfrde/homebrew-cask,flada-auxv/homebrew-cask,lalyos/homebrew-cask,hanxue/caskroom,andrewdisley/homebrew-cask,vin047/homebrew-cask,ftiff/homebrew-cask,cedwardsmedia/homebrew-cask,aki77/homebrew-cask,reelsense/homebrew-cask,samshadwell/homebrew-cask,CameronGarrett/homebrew-cask,kkdd/homebrew-cask,ninjahoahong/homebrew-cask,jangalinski/homebrew-cask,huanzhang/homebrew-cask,y00rb/homebrew-cask,williamboman/homebrew-cask,jeanregisser/homebrew-cask,reitermarkus/homebrew-cask,bchatard/homebrew-cask,akiomik/homebrew-cask,unasuke/homebrew-cask,rubenerd/homebrew-cask,inz/homebrew-cask,esebastian/homebrew-cask,jeroenj/homebrew-cask,garborg/homebrew-cask,thii/homebrew-cask,Ephemera/homebrew-cask,alebcay/homebrew-cask,mfpierre/homebrew-cask,mariusbutuc/homebrew-cask,gibsjose/homebrew-cask,rogeriopradoj/homebrew-cask,gerrymiller/homebrew-cask,mikem/homebrew-cask,mchlrmrz/homebrew-cask,mauricerkelly/homebrew-cask,ohammersmith/homebrew-cask,casidiablo/homebrew-cask,danielgomezrico/homebrew-cask,arranubels/homebrew-cask,mjgardne
r/homebrew-cask,y00rb/homebrew-cask,gord1anknot/homebrew-cask,moimikey/homebrew-cask,crzrcn/homebrew-cask,johnjelinek/homebrew-cask,onlynone/homebrew-cask,tjnycum/homebrew-cask,antogg/homebrew-cask,nysthee/homebrew-cask,gibsjose/homebrew-cask,anbotero/homebrew-cask,kingthorin/homebrew-cask,rogeriopradoj/homebrew-cask,dlovitch/homebrew-cask,kamilboratynski/homebrew-cask,nathanielvarona/homebrew-cask,stigkj/homebrew-caskroom-cask,bcaceiro/homebrew-cask,arranubels/homebrew-cask,wickedsp1d3r/homebrew-cask,perfide/homebrew-cask,dictcp/homebrew-cask,SamiHiltunen/homebrew-cask,iamso/homebrew-cask,lvicentesanchez/homebrew-cask,bgandon/homebrew-cask,carlmod/homebrew-cask,askl56/homebrew-cask,axodys/homebrew-cask,julienlavergne/homebrew-cask,norio-nomura/homebrew-cask,zorosteven/homebrew-cask,johan/homebrew-cask,mikem/homebrew-cask,gmkey/homebrew-cask,farmerchris/homebrew-cask,wickles/homebrew-cask,gabrielizaias/homebrew-cask,guerrero/homebrew-cask,stonehippo/homebrew-cask,shonjir/homebrew-cask,nathanielvarona/homebrew-cask,RogerThiede/homebrew-cask,remko/homebrew-cask,toonetown/homebrew-cask,jeanregisser/homebrew-cask,fly19890211/homebrew-cask,mattfelsen/homebrew-cask,fanquake/homebrew-cask,tjnycum/homebrew-cask,anbotero/homebrew-cask,mazehall/homebrew-cask,gguillotte/homebrew-cask,6uclz1/homebrew-cask,yutarody/homebrew-cask,cliffcotino/homebrew-cask,sanyer/homebrew-cask,blainesch/homebrew-cask,miguelfrde/homebrew-cask,jbeagley52/homebrew-cask,deiga/homebrew-cask,cobyism/homebrew-cask,howie/homebrew-cask,johndbritton/homebrew-cask,lifepillar/homebrew-cask,klane/homebrew-cask,sparrc/homebrew-cask,adelinofaria/homebrew-cask,wizonesolutions/homebrew-cask,mingzhi22/homebrew-cask,joshka/homebrew-cask,colindean/homebrew-cask,jpmat296/homebrew-cask,nrlquaker/homebrew-cask,aktau/homebrew-cask,robbiethegeek/homebrew-cask,mindriot101/homebrew-cask,sosedoff/homebrew-cask,barravi/homebrew-cask,lalyos/homebrew-cask,yurikoles/homebrew-cask,ksylvan/homebrew-cask,astorije/homebrew-cask,sjac
kman/homebrew-cask,mattrobenolt/homebrew-cask,singingwolfboy/homebrew-cask,mathbunnyru/homebrew-cask,JacopKane/homebrew-cask,ksylvan/homebrew-cask,mokagio/homebrew-cask,kirikiriyamama/homebrew-cask,wolflee/homebrew-cask,klane/homebrew-cask,tarwich/homebrew-cask,chino/homebrew-cask,cclauss/homebrew-cask,gilesdring/homebrew-cask,Bombenleger/homebrew-cask,gilesdring/homebrew-cask,seanzxx/homebrew-cask,hakamadare/homebrew-cask,underyx/homebrew-cask,MichaelPei/homebrew-cask,forevergenin/homebrew-cask,paulombcosta/homebrew-cask,diguage/homebrew-cask,dieterdemeyer/homebrew-cask,zmwangx/homebrew-cask,lucasmezencio/homebrew-cask,giannitm/homebrew-cask,brianshumate/homebrew-cask,decrement/homebrew-cask,gurghet/homebrew-cask,FredLackeyOfficial/homebrew-cask,MisumiRize/homebrew-cask,tedbundyjr/homebrew-cask,unasuke/homebrew-cask,cohei/homebrew-cask,afh/homebrew-cask,mingzhi22/homebrew-cask,boecko/homebrew-cask,ywfwj2008/homebrew-cask,taherio/homebrew-cask,kTitan/homebrew-cask,jedahan/homebrew-cask,athrunsun/homebrew-cask,Hywan/homebrew-cask,shonjir/homebrew-cask,Whoaa512/homebrew-cask,asins/homebrew-cask,zchee/homebrew-cask,Amorymeltzer/homebrew-cask,rajiv/homebrew-cask,wickles/homebrew-cask,lcasey001/homebrew-cask,vuquoctuan/homebrew-cask,colindean/homebrew-cask,julionc/homebrew-cask,asbachb/homebrew-cask,dcondrey/homebrew-cask,shanonvl/homebrew-cask,lukeadams/homebrew-cask,mjgardner/homebrew-cask,gyugyu/homebrew-cask,BenjaminHCCarr/homebrew-cask,scw/homebrew-cask,paulombcosta/homebrew-cask,ayohrling/homebrew-cask,pkq/homebrew-cask,wuman/homebrew-cask,michelegera/homebrew-cask,sosedoff/homebrew-cask,jbeagley52/homebrew-cask,elnappo/homebrew-cask,hakamadare/homebrew-cask,gwaldo/homebrew-cask,wayou/homebrew-cask,FranklinChen/homebrew-cask,hvisage/homebrew-cask,Philosoft/homebrew-cask,maxnordlund/homebrew-cask,mgryszko/homebrew-cask,xtian/homebrew-cask,johntrandall/homebrew-cask,dictcp/homebrew-cask,antogg/homebrew-cask,MatzFan/homebrew-cask,retrography/homebrew-cask,kronicd/home
brew-cask,n8henrie/homebrew-cask,tranc99/homebrew-cask,nshemonsky/homebrew-cask,markthetech/homebrew-cask,a1russell/homebrew-cask,greg5green/homebrew-cask,epmatsw/homebrew-cask,tangestani/homebrew-cask,mattfelsen/homebrew-cask,christophermanning/homebrew-cask,kassi/homebrew-cask,fazo96/homebrew-cask,thehunmonkgroup/homebrew-cask,mokagio/homebrew-cask,JacopKane/homebrew-cask,JacopKane/homebrew-cask,13k/homebrew-cask,mrmachine/homebrew-cask,RogerThiede/homebrew-cask,dlovitch/homebrew-cask,coeligena/homebrew-customized,renard/homebrew-cask,adriweb/homebrew-cask,ptb/homebrew-cask,inta/homebrew-cask,fly19890211/homebrew-cask,alexg0/homebrew-cask,3van/homebrew-cask,Gasol/homebrew-cask,gurghet/homebrew-cask,bosr/homebrew-cask,josa42/homebrew-cask,jppelteret/homebrew-cask,mfpierre/homebrew-cask,frapposelli/homebrew-cask,kostasdizas/homebrew-cask,markhuber/homebrew-cask,dvdoliveira/homebrew-cask,jonathanwiesel/homebrew-cask,hanxue/caskroom,seanorama/homebrew-cask,lukeadams/homebrew-cask,jasmas/homebrew-cask,Bombenleger/homebrew-cask,ajbw/homebrew-cask,tranc99/homebrew-cask,puffdad/homebrew-cask,devmynd/homebrew-cask,delphinus35/homebrew-cask,xiongchiamiov/homebrew-cask,kievechua/homebrew-cask,caskroom/homebrew-cask,zeusdeux/homebrew-cask,Keloran/homebrew-cask,daften/homebrew-cask,zmwangx/homebrew-cask,nightscape/homebrew-cask,underyx/homebrew-cask,mhubig/homebrew-cask,xcezx/homebrew-cask,casidiablo/homebrew-cask,kryhear/homebrew-cask,chrisfinazzo/homebrew-cask,joshka/homebrew-cask,danielgomezrico/homebrew-cask,fwiesel/homebrew-cask,wuman/homebrew-cask,gord1anknot/homebrew-cask,chrisRidgers/homebrew-cask,mrmachine/homebrew-cask,danielbayley/homebrew-cask,sebcode/homebrew-cask,imgarylai/homebrew-cask,uetchy/homebrew-cask,kongslund/homebrew-cask,cobyism/homebrew-cask,optikfluffel/homebrew-cask,stevehedrick/homebrew-cask,reelsense/homebrew-cask,robertgzr/homebrew-cask,kTitan/homebrew-cask,Philosoft/homebrew-cask,MicTech/homebrew-cask,Cottser/homebrew-cask,dspeckhard/homebrew-cas
k,ch3n2k/homebrew-cask,0rax/homebrew-cask,reitermarkus/homebrew-cask,jconley/homebrew-cask,SamiHiltunen/homebrew-cask,claui/homebrew-cask,jellyfishcoder/homebrew-cask,jayshao/homebrew-cask,mishari/homebrew-cask,Keloran/homebrew-cask,royalwang/homebrew-cask,akiomik/homebrew-cask,stephenwade/homebrew-cask,tsparber/homebrew-cask,fharbe/homebrew-cask,SentinelWarren/homebrew-cask,AnastasiaSulyagina/homebrew-cask,wickedsp1d3r/homebrew-cask,malob/homebrew-cask,d/homebrew-cask,Cottser/homebrew-cask,jiashuw/homebrew-cask,sscotth/homebrew-cask,bcaceiro/homebrew-cask,sanchezm/homebrew-cask,Ngrd/homebrew-cask,mhubig/homebrew-cask,timsutton/homebrew-cask,ctrevino/homebrew-cask,pablote/homebrew-cask,slack4u/homebrew-cask,Ephemera/homebrew-cask,kesara/homebrew-cask,n0ts/homebrew-cask,afh/homebrew-cask,mwilmer/homebrew-cask,singingwolfboy/homebrew-cask,nivanchikov/homebrew-cask,pkq/homebrew-cask,buo/homebrew-cask,lauantai/homebrew-cask,mwilmer/homebrew-cask,blogabe/homebrew-cask,singingwolfboy/homebrew-cask,adrianchia/homebrew-cask,blainesch/homebrew-cask,alebcay/homebrew-cask,helloIAmPau/homebrew-cask,djakarta-trap/homebrew-myCask,scribblemaniac/homebrew-cask,santoshsahoo/homebrew-cask,jiashuw/homebrew-cask,scribblemaniac/homebrew-cask,dwihn0r/homebrew-cask,0xadada/homebrew-cask,neverfox/homebrew-cask,cobyism/homebrew-cask,napaxton/homebrew-cask,tmoreira2020/homebrew,nicolas-brousse/homebrew-cask,lantrix/homebrew-cask,bric3/homebrew-cask,colindunn/homebrew-cask,nathansgreen/homebrew-cask,cprecioso/homebrew-cask,lantrix/homebrew-cask,samdoran/homebrew-cask,tyage/homebrew-cask,illusionfield/homebrew-cask,optikfluffel/homebrew-cask,barravi/homebrew-cask,danielbayley/homebrew-cask,yurikoles/homebrew-cask,syscrusher/homebrew-cask,reitermarkus/homebrew-cask,gerrypower/homebrew-cask,Labutin/homebrew-cask,jtriley/homebrew-cask,bcomnes/homebrew-cask,timsutton/homebrew-cask,markthetech/homebrew-cask,tangestani/homebrew-cask,codeurge/homebrew-cask,feigaochn/homebrew-cask,morganestes/homebrew
-cask,Saklad5/homebrew-cask,carlmod/homebrew-cask,askl56/homebrew-cask,Ibuprofen/homebrew-cask,scribblemaniac/homebrew-cask,bric3/homebrew-cask,KosherBacon/homebrew-cask,kevyau/homebrew-cask,nshemonsky/homebrew-cask,ahvigil/homebrew-cask,cfillion/homebrew-cask,coneman/homebrew-cask,riyad/homebrew-cask,Nitecon/homebrew-cask,albertico/homebrew-cask,psibre/homebrew-cask,artdevjs/homebrew-cask,haha1903/homebrew-cask,xakraz/homebrew-cask,stonehippo/homebrew-cask,andrewdisley/homebrew-cask,tyage/homebrew-cask,leipert/homebrew-cask,wmorin/homebrew-cask,neverfox/homebrew-cask,lukasbestle/homebrew-cask,usami-k/homebrew-cask,englishm/homebrew-cask,feniix/homebrew-cask,schneidmaster/homebrew-cask,mjdescy/homebrew-cask,uetchy/homebrew-cask,ebraminio/homebrew-cask,bdhess/homebrew-cask,retrography/homebrew-cask,paulbreslin/homebrew-cask,doits/homebrew-cask,tedski/homebrew-cask,claui/homebrew-cask,samshadwell/homebrew-cask,xakraz/homebrew-cask,gyndav/homebrew-cask,thomanq/homebrew-cask,paour/homebrew-cask,wmorin/homebrew-cask,kevyau/homebrew-cask,cclauss/homebrew-cask,ftiff/homebrew-cask,samnung/homebrew-cask,christer155/homebrew-cask,pkq/homebrew-cask,djakarta-trap/homebrew-myCask,deiga/homebrew-cask,rajiv/homebrew-cask,onlynone/homebrew-cask,kkdd/homebrew-cask,usami-k/homebrew-cask,nathancahill/homebrew-cask,MoOx/homebrew-cask,sgnh/homebrew-cask,RickWong/homebrew-cask,tjnycum/homebrew-cask,christophermanning/homebrew-cask,thehunmonkgroup/homebrew-cask,jppelteret/homebrew-cask,BahtiyarB/homebrew-cask,lukasbestle/homebrew-cask,puffdad/homebrew-cask,jrwesolo/homebrew-cask,fwiesel/homebrew-cask,Gasol/homebrew-cask,wesen/homebrew-cask,asins/homebrew-cask,shorshe/homebrew-cask,norio-nomura/homebrew-cask,shishi/homebrew-cask,thii/homebrew-cask,inz/homebrew-cask,arronmabrey/homebrew-cask,johan/homebrew-cask,jhowtan/homebrew-cask,neverfox/homebrew-cask,afdnlw/homebrew-cask,faun/homebrew-cask,nathancahill/homebrew-cask,rcuza/homebrew-cask,JosephViolago/homebrew-cask,vitorgalvao/homebrew-c
ask,cedwardsmedia/homebrew-cask,dwkns/homebrew-cask,malob/homebrew-cask,yumitsu/homebrew-cask,antogg/homebrew-cask,mwek/homebrew-cask,ebraminio/homebrew-cask,johnste/homebrew-cask,wmorin/homebrew-cask,Ngrd/homebrew-cask,zerrot/homebrew-cask,MatzFan/homebrew-cask,deanmorin/homebrew-cask,bcomnes/homebrew-cask,gyndav/homebrew-cask,RJHsiao/homebrew-cask,mariusbutuc/homebrew-cask,kostasdizas/homebrew-cask,fanquake/homebrew-cask,gyugyu/homebrew-cask,ianyh/homebrew-cask,FredLackeyOfficial/homebrew-cask,chuanxd/homebrew-cask,adriweb/homebrew-cask,stevenmaguire/homebrew-cask,maxnordlund/homebrew-cask,slnovak/homebrew-cask,deiga/homebrew-cask,crmne/homebrew-cask,cprecioso/homebrew-cask,tjt263/homebrew-cask,ericbn/homebrew-cask,supriyantomaftuh/homebrew-cask,ldong/homebrew-cask,johnste/homebrew-cask,wastrachan/homebrew-cask,afdnlw/homebrew-cask,jeroenj/homebrew-cask,williamboman/homebrew-cask,wizonesolutions/homebrew-cask,wesen/homebrew-cask,kolomiichenko/homebrew-cask | ruby | ## Code Before:
# Homebrew-cask definition for MacPaw Gemini (duplicate file finder for OS X).
cask :v1 => 'macpaw-gemini' do
# :latest/:no_check — the vendor serves an unversioned download, so no fixed
# version or checksum can be pinned.
version :latest
sha256 :no_check
# devmate.com is the official download host per the vendor homepage
url 'http://dl.devmate.com/download/com.macpaw.site.Gemini/macpaw%20gemini.dmg'
appcast 'http://updates.devmate.com/com.macpaw.site.Gemini.xml'
homepage 'http://macpaw.com/gemini'
license :unknown # todo: change license and remove this comment; ':unknown' is a machine-generated placeholder
app 'MacPaw Gemini.app'
end
## Instruction:
Update url and license for MacPaw Gemini.app
## Code After:
# Homebrew-cask definition for MacPaw Gemini (duplicate file finder for OS X).
cask :v1 => 'macpaw-gemini' do
# :latest/:no_check — the vendor serves an unversioned download, so no fixed
# version or checksum can be pinned.
version :latest
sha256 :no_check
# devmate.com is the official download host per the vendor homepage
url 'http://dl.devmate.com/com.macpaw.site.Gemini/MacPawGemini.dmg'
appcast 'http://updates.devmate.com/com.macpaw.site.Gemini.xml'
homepage 'http://macpaw.com/gemini'
license :commercial
app 'MacPaw Gemini.app'
end
|
99f3ea770d1bbaf3d955bed7d72b60fa7d3c2d43 | metadata.rb | metadata.rb | name 'yum-elrepo'
maintainer 'Chef Software, Inc.'
maintainer_email 'cookbooks@chef.io'
license 'Apache 2.0'
description 'Installs/Configures yum-elrepo'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '0.2.2'
depends 'yum', '~> 3.0'
source_url 'https://github.com/chef-cookbooks/yum-elrepo' if respond_to?(:source_url)
issues_url 'https://github.com/chef-cookbooks/yum-elrepo/issues' if respond_to?(:issues_url)
| name 'yum-elrepo'
maintainer 'Chef Software, Inc.'
maintainer_email 'cookbooks@chef.io'
license 'Apache 2.0'
description 'Installs and configures the elrepo yum repository'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '0.2.2'
depends 'yum', '~> 3.2'
source_url 'https://github.com/chef-cookbooks/yum-elrepo' if respond_to?(:source_url)
issues_url 'https://github.com/chef-cookbooks/yum-elrepo/issues' if respond_to?(:issues_url)
%w(amazon centos fedora oracle redhat scientific).each do |os|
supports os
end
| Improve description, bump requirement to yum 3.2 | Improve description, bump requirement to yum 3.2
| Ruby | apache-2.0 | chef-cookbooks/yum-elrepo | ruby | ## Code Before:
# Chef cookbook metadata for the yum-elrepo cookbook.
name 'yum-elrepo'
maintainer 'Chef Software, Inc.'
maintainer_email 'cookbooks@chef.io'
license 'Apache 2.0'
description 'Installs/Configures yum-elrepo'
# The long description is read from the cookbook's own README.
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '0.2.2'
depends 'yum', '~> 3.0'
# source_url/issues_url are only understood by newer Chef clients, hence the
# respond_to? guards for backward compatibility.
source_url 'https://github.com/chef-cookbooks/yum-elrepo' if respond_to?(:source_url)
issues_url 'https://github.com/chef-cookbooks/yum-elrepo/issues' if respond_to?(:issues_url)
## Instruction:
Improve description, bump requirement to yum 3.2
## Code After:
# Chef cookbook metadata for the yum-elrepo cookbook.
name 'yum-elrepo'
maintainer 'Chef Software, Inc.'
maintainer_email 'cookbooks@chef.io'
license 'Apache 2.0'
description 'Installs and configures the elrepo yum repository'
# The long description is read from the cookbook's own README.
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '0.2.2'
depends 'yum', '~> 3.2'
# source_url/issues_url are only understood by newer Chef clients, hence the
# respond_to? guards for backward compatibility.
source_url 'https://github.com/chef-cookbooks/yum-elrepo' if respond_to?(:source_url)
issues_url 'https://github.com/chef-cookbooks/yum-elrepo/issues' if respond_to?(:issues_url)
# Declare every RHEL-family platform this yum repository supports.
%w(amazon centos fedora oracle redhat scientific).each do |os|
supports os
end
|
9df4be9793c7677339d7b686da3d37adbed84977 | make.bat | make.bat | vcvarsall.bat x86
CL /I C:\OpenSSL-Win32\include /INCREMENTAL bmpow.c /MT /link /DLL /OUT:bmpow32.dll /LIBPATH:"C:\OpenSSL-Win32\lib" libeay32.lib ws2_32.lib
| C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat x86
CL /I C:\OpenSSL-Win32\include /INCREMENTAL bmpow.c /MT /link /DLL /OUT:bmpow32.dll /LIBPATH:"C:\OpenSSL-Win32\lib" libeay32.lib ws2_32.lib
| Use full path to vcvarsall | Use full path to vcvarsall
| Batchfile | mit | bmng-dev/bitmessage-pow,bmng-dev/bitmessage-pow,bmng-dev/bitmessage-pow | batchfile | ## Code Before:
:: Set up the 32-bit MSVC build environment, then compile bmpow.c into a DLL
:: linked against OpenSSL (libeay32) and Winsock.
:: NOTE(review): invoking a child .bat without `call` transfers control to it
:: and never returns, so the CL line below would not execute — confirm whether
:: `call vcvarsall.bat x86` is intended. Also relies on vcvarsall.bat being on
:: PATH.
vcvarsall.bat x86
CL /I C:\OpenSSL-Win32\include /INCREMENTAL bmpow.c /MT /link /DLL /OUT:bmpow32.dll /LIBPATH:"C:\OpenSSL-Win32\lib" libeay32.lib ws2_32.lib
## Instruction:
Use full path to vcvarsall
## Code After:
:: Set up the 32-bit MSVC (Visual Studio 2015 / VC 14.0) build environment,
:: then compile bmpow.c into a DLL linked against OpenSSL (libeay32) and
:: Winsock (ws2_32).
::
:: Fixes: (1) the path contains spaces and must be quoted, otherwise cmd runs
:: "C:\Program" and fails; (2) a child .bat must be invoked with `call` or
:: control never returns to this script and the CL line is unreachable.
call "C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat" x86
CL /I C:\OpenSSL-Win32\include /INCREMENTAL bmpow.c /MT /link /DLL /OUT:bmpow32.dll /LIBPATH:"C:\OpenSSL-Win32\lib" libeay32.lib ws2_32.lib
|
4e1e46cb989f56f8d69375f33cabbf565200fcdb | src/main/java/com/elmakers/mine/bukkit/action/BaseTeleportAction.java | src/main/java/com/elmakers/mine/bukkit/action/BaseTeleportAction.java | package com.elmakers.mine.bukkit.action;
import com.elmakers.mine.bukkit.api.action.CastContext;
import org.bukkit.Location;
import org.bukkit.configuration.ConfigurationSection;
import org.bukkit.entity.Entity;
public abstract class BaseTeleportAction extends BaseSpellAction
{
protected int verticalSearchDistance;
protected boolean safe = true;
@Override
public void prepare(CastContext context, ConfigurationSection parameters) {
super.prepare(context, parameters);
verticalSearchDistance = parameters.getInt("vertical_range", context.getVerticalSearchDistance());
safe = parameters.getBoolean("safe", true);
}
protected void teleport(CastContext context, Entity entity, Location targetLocation) {
context.teleport(entity, targetLocation, verticalSearchDistance, safe);
}
}
| package com.elmakers.mine.bukkit.action;
import com.elmakers.mine.bukkit.api.action.CastContext;
import com.elmakers.mine.bukkit.api.spell.SpellResult;
import org.bukkit.Location;
import org.bukkit.configuration.ConfigurationSection;
import org.bukkit.entity.Entity;
/**
 * Base class for spell actions that teleport an entity, providing the shared
 * safe-landing, vertical search range and build-permission handling.
 */
public abstract class BaseTeleportAction extends BaseSpellAction
{
// How far up/down to search for a usable destination block.
protected int verticalSearchDistance;
// When true, the teleport destination is adjusted to a safe location.
protected boolean safe = true;
// When true, teleporting requires build permission at the target block.
private boolean requiresBuildPermission = false;
/**
 * Reads the shared teleport parameters from the spell configuration.
 *
 * Recognized keys: "vertical_range" (defaults to the context's vertical
 * search distance), "safe" (default true), "require_build" (default false).
 */
@Override
public void prepare(CastContext context, ConfigurationSection parameters) {
super.prepare(context, parameters);
verticalSearchDistance = parameters.getInt("vertical_range", context.getVerticalSearchDistance());
safe = parameters.getBoolean("safe", true);
requiresBuildPermission = parameters.getBoolean("require_build", false);
}
/**
 * Teleports the given entity to the target location.
 *
 * @return INSUFFICIENT_PERMISSION when require_build is set and the caster
 *         may not build at the target block; otherwise CAST on a successful
 *         teleport or FAIL when the context reports failure.
 */
protected SpellResult teleport(CastContext context, Entity entity, Location targetLocation) {
if (requiresBuildPermission && !context.hasBuildPermission(targetLocation.getBlock())) {
return SpellResult.INSUFFICIENT_PERMISSION;
}
return context.teleport(entity, targetLocation, verticalSearchDistance, safe) ? SpellResult.CAST : SpellResult.FAIL;
}
}
| Add require_build parameter to teleport actions | Add require_build parameter to teleport actions
| Java | mit | elBukkit/MagicLib,elBukkit/MagicPlugin,elBukkit/MagicPlugin,elBukkit/MagicPlugin | java | ## Code Before:
package com.elmakers.mine.bukkit.action;
import com.elmakers.mine.bukkit.api.action.CastContext;
import org.bukkit.Location;
import org.bukkit.configuration.ConfigurationSection;
import org.bukkit.entity.Entity;
public abstract class BaseTeleportAction extends BaseSpellAction
{
protected int verticalSearchDistance;
protected boolean safe = true;
@Override
public void prepare(CastContext context, ConfigurationSection parameters) {
super.prepare(context, parameters);
verticalSearchDistance = parameters.getInt("vertical_range", context.getVerticalSearchDistance());
safe = parameters.getBoolean("safe", true);
}
protected void teleport(CastContext context, Entity entity, Location targetLocation) {
context.teleport(entity, targetLocation, verticalSearchDistance, safe);
}
}
## Instruction:
Add require_build parameter to teleport actions
## Code After:
package com.elmakers.mine.bukkit.action;
import com.elmakers.mine.bukkit.api.action.CastContext;
import com.elmakers.mine.bukkit.api.spell.SpellResult;
import org.bukkit.Location;
import org.bukkit.configuration.ConfigurationSection;
import org.bukkit.entity.Entity;
public abstract class BaseTeleportAction extends BaseSpellAction
{
protected int verticalSearchDistance;
protected boolean safe = true;
private boolean requiresBuildPermission = false;
@Override
public void prepare(CastContext context, ConfigurationSection parameters) {
super.prepare(context, parameters);
verticalSearchDistance = parameters.getInt("vertical_range", context.getVerticalSearchDistance());
safe = parameters.getBoolean("safe", true);
requiresBuildPermission = parameters.getBoolean("require_build", false);
}
protected SpellResult teleport(CastContext context, Entity entity, Location targetLocation) {
if (requiresBuildPermission && !context.hasBuildPermission(targetLocation.getBlock())) {
return SpellResult.INSUFFICIENT_PERMISSION;
}
return context.teleport(entity, targetLocation, verticalSearchDistance, safe) ? SpellResult.CAST : SpellResult.FAIL;
}
}
|
1c55271f93544bd13be5dacc67ba213cee838785 | Readme.md | Readme.md | Clinch
======
*easy command-line parsing for Python 3*
```python
from clinch import application, arg
git = application('a stupid content tracker')
@git.command('Add things')
def add(all: arg('-A', '--all', action='store_true',
help='really make things match'),
paths: arg('paths', nargs='*')):
return (add, all, paths)
@git.command('Commit changes')
def commit(all: arg('-a', '--all',
help='include all modifications and deletions'),
message: arg('-m', '--message', help='the commit message'),
verbose: arg('-v', '--verbose', help='show diff in message editor')):
pass
if __name__ == '__main__':
git.run()
```
| Clinch
======
*easy command-line parsing for Python 3*
[](https://travis-ci.org/enaeseth/clinch)
```python
from clinch import application, arg
git = application('a stupid content tracker')
@git.command('Add things')
def add(all: arg('-A', '--all', action='store_true',
help='really make things match'),
paths: arg('paths', nargs='*')):
return (add, all, paths)
@git.command('Commit changes')
def commit(all: arg('-a', '--all',
help='include all modifications and deletions'),
message: arg('-m', '--message', help='the commit message'),
verbose: arg('-v', '--verbose', help='show diff in message editor')):
pass
if __name__ == '__main__':
git.run()
```
| Include Travis build status in readme | Include Travis build status in readme
| Markdown | mit | enaeseth/clinch | markdown | ## Code Before:
Clinch
======
*easy command-line parsing for Python 3*
```python
from clinch import application, arg
git = application('a stupid content tracker')
@git.command('Add things')
def add(all: arg('-A', '--all', action='store_true',
help='really make things match'),
paths: arg('paths', nargs='*')):
return (add, all, paths)
@git.command('Commit changes')
def commit(all: arg('-a', '--all',
help='include all modifications and deletions'),
message: arg('-m', '--message', help='the commit message'),
verbose: arg('-v', '--verbose', help='show diff in message editor')):
pass
if __name__ == '__main__':
git.run()
```
## Instruction:
Include Travis build status in readme
## Code After:
Clinch
======
*easy command-line parsing for Python 3*
[](https://travis-ci.org/enaeseth/clinch)
```python
from clinch import application, arg
git = application('a stupid content tracker')
@git.command('Add things')
def add(all: arg('-A', '--all', action='store_true',
help='really make things match'),
paths: arg('paths', nargs='*')):
return (add, all, paths)
@git.command('Commit changes')
def commit(all: arg('-a', '--all',
help='include all modifications and deletions'),
message: arg('-m', '--message', help='the commit message'),
verbose: arg('-v', '--verbose', help='show diff in message editor')):
pass
if __name__ == '__main__':
git.run()
```
|
8da9505c384f849b3e0eb14508913e89880bbe1d | src/Sylius/Bundle/AdminBundle/Resources/config/grids/product_variant_with_catalog_promotion.yml | src/Sylius/Bundle/AdminBundle/Resources/config/grids/product_variant_with_catalog_promotion.yml | sylius_grid:
grids:
sylius_admin_product_variant_with_catalog_promotion:
driver:
name: doctrine/orm
options:
class: "%sylius.model.product_variant.class%"
repository:
method: createCatalogPromotionListQueryBuilder
arguments: ['%locale%', 'expr:notFoundOnNull(service("sylius.repository.catalog_promotion").find($id))']
sorting:
code: asc
fields:
name:
type: twig
path: .
label: sylius.ui.name
options:
template: "@SyliusAdmin/ProductVariant/Grid/Field/name.html.twig"
code:
type: string
label: sylius.ui.code
enabled:
type: twig
label: sylius.ui.enabled
options:
template: "@SyliusUi/Grid/Field/enabled.html.twig"
inventory:
type: twig
path: .
label: sylius.ui.inventory
options:
template: "@SyliusAdmin/ProductVariant/Grid/Field/inventory.html.twig"
filters:
code:
type: string
label: sylius.ui.code
name:
type: string
label: sylius.ui.name
options:
fields: [translation.name]
actions:
item:
show_product:
type: show
label: sylius.ui.show_product
options:
link:
route: sylius_admin_product_show
parameters:
id: resource.product.id
| sylius_grid:
grids:
sylius_admin_product_variant_with_catalog_promotion:
driver:
name: doctrine/orm
options:
class: "%sylius.model.product_variant.class%"
repository:
method: createCatalogPromotionListQueryBuilder
arguments: ['%locale%', 'expr:notFoundOnNull(service("sylius.repository.catalog_promotion").find($id))']
sorting:
code: asc
fields:
name:
type: twig
path: .
label: sylius.ui.name
options:
template: "@SyliusAdmin/ProductVariant/Grid/Field/name.html.twig"
code:
type: string
label: sylius.ui.code
enabled:
type: twig
label: sylius.ui.enabled
options:
template: "@SyliusUi/Grid/Field/enabled.html.twig"
inventory:
type: twig
path: .
label: sylius.ui.inventory
options:
template: "@SyliusAdmin/ProductVariant/Grid/Field/inventory.html.twig"
filters:
code:
type: string
label: sylius.ui.code
name:
type: string
label: sylius.ui.name
options:
fields: [translation.name]
actions:
item:
show_product:
type: show
label: sylius.ui.show_product
options:
link:
route: sylius_admin_product_show
parameters:
id: resource.product.id
update:
type: update
options:
link:
parameters:
id: resource.id
productId: resource.product.id
| Add edit variant redirect action to affected variants list | [Admin][CatalogPromotion] Add edit variant redirect action to affected variants list
| YAML | mit | diimpp/Sylius,diimpp/Sylius,Sylius/Sylius,diimpp/Sylius,Sylius/Sylius,Sylius/Sylius | yaml | ## Code Before:
sylius_grid:
grids:
sylius_admin_product_variant_with_catalog_promotion:
driver:
name: doctrine/orm
options:
class: "%sylius.model.product_variant.class%"
repository:
method: createCatalogPromotionListQueryBuilder
arguments: ['%locale%', 'expr:notFoundOnNull(service("sylius.repository.catalog_promotion").find($id))']
sorting:
code: asc
fields:
name:
type: twig
path: .
label: sylius.ui.name
options:
template: "@SyliusAdmin/ProductVariant/Grid/Field/name.html.twig"
code:
type: string
label: sylius.ui.code
enabled:
type: twig
label: sylius.ui.enabled
options:
template: "@SyliusUi/Grid/Field/enabled.html.twig"
inventory:
type: twig
path: .
label: sylius.ui.inventory
options:
template: "@SyliusAdmin/ProductVariant/Grid/Field/inventory.html.twig"
filters:
code:
type: string
label: sylius.ui.code
name:
type: string
label: sylius.ui.name
options:
fields: [translation.name]
actions:
item:
show_product:
type: show
label: sylius.ui.show_product
options:
link:
route: sylius_admin_product_show
parameters:
id: resource.product.id
## Instruction:
[Admin][CatalogPromotion] Add edit variant redirect action to affected variants list
## Code After:
sylius_grid:
grids:
sylius_admin_product_variant_with_catalog_promotion:
driver:
name: doctrine/orm
options:
class: "%sylius.model.product_variant.class%"
repository:
method: createCatalogPromotionListQueryBuilder
arguments: ['%locale%', 'expr:notFoundOnNull(service("sylius.repository.catalog_promotion").find($id))']
sorting:
code: asc
fields:
name:
type: twig
path: .
label: sylius.ui.name
options:
template: "@SyliusAdmin/ProductVariant/Grid/Field/name.html.twig"
code:
type: string
label: sylius.ui.code
enabled:
type: twig
label: sylius.ui.enabled
options:
template: "@SyliusUi/Grid/Field/enabled.html.twig"
inventory:
type: twig
path: .
label: sylius.ui.inventory
options:
template: "@SyliusAdmin/ProductVariant/Grid/Field/inventory.html.twig"
filters:
code:
type: string
label: sylius.ui.code
name:
type: string
label: sylius.ui.name
options:
fields: [translation.name]
actions:
item:
show_product:
type: show
label: sylius.ui.show_product
options:
link:
route: sylius_admin_product_show
parameters:
id: resource.product.id
update:
type: update
options:
link:
parameters:
id: resource.id
productId: resource.product.id
|
b45af551c01a16ac8f570c5ddf82eb9fe37279a9 | test/QuestionSet.js | test/QuestionSet.js | var assert = require('assert');
describe('Question set', function () {
let QuestionSet = require('../src/QuestionSet');
let questions;
beforeEach(function () {
questions = new QuestionSet();
});
afterEach(function() {
delete questions;
});
it('can be chained', function() {
assert.equal(questions, questions.narrow());
assert.equal(questions, questions.noanswers);
});
it('#array returns question objects', function() {
let question = questions.array[0];
assert.notEqual(question.text, undefined);
assert.notEqual(question.answer, undefined);
});
it('#narrow(10).array returns a 10 element array', function() {
assert.equal(questions.narrow(10).array.length, 10);
});
it('#narrow(0).array returns an empty array', function() {
let result = questions.narrow(0).array;
assert.equal(result.constructor, Array);
assert.equal(result.length, 0);
});
it('#narrow(1000).array returns all questions', function() {
let length = questions.array.length;
assert.equal(questions.narrow(1000).array.length, length);
});
it('#noanswers leaves no answer properties', function() {
let result = questions.noanswers.array;
for (var i in result) {
assert.equal(result[i].answer, undefined);
}
});
});
| var assert = require('assert');
describe('Question set', function () {
let QuestionSet = require('../src/QuestionSet');
let questions;
beforeEach(function () {
questions = new QuestionSet();
});
afterEach(function() {
delete questions;
});
it('can be chained', function() {
assert.equal(questions, questions.narrow());
assert.equal(questions, questions.noanswers);
});
it('#array returns valid question objects', function() {
questions.forEach((question) => {
assert.notEqual(question.text, undefined);
assert.notEqual(question.answer, undefined);
assert.notEqual(question.choice_a, undefined);
});
});
it('#narrow(10).array returns a 10 element array', function() {
assert.equal(questions.narrow(10).array.length, 10);
});
it('#narrow(0).array returns an empty array', function() {
let result = questions.narrow(0).array;
assert.equal(result.constructor, Array);
assert.equal(result.length, 0);
});
it('#narrow(1000).array returns all questions', function() {
let length = questions.array.length;
assert.equal(questions.narrow(1000).array.length, length);
});
it('#noanswers leaves no answer properties', function() {
let result = questions.noanswers.array;
for (var i in result) {
assert.equal(result[i].answer, undefined);
}
});
});
| Change test for better coverage | Change test for better coverage
| JavaScript | mit | puck7744/AES-Front-End-Test | javascript | ## Code Before:
var assert = require('assert');
describe('Question set', function () {
let QuestionSet = require('../src/QuestionSet');
let questions;
beforeEach(function () {
questions = new QuestionSet();
});
afterEach(function() {
delete questions;
});
it('can be chained', function() {
assert.equal(questions, questions.narrow());
assert.equal(questions, questions.noanswers);
});
it('#array returns question objects', function() {
let question = questions.array[0];
assert.notEqual(question.text, undefined);
assert.notEqual(question.answer, undefined);
});
it('#narrow(10).array returns a 10 element array', function() {
assert.equal(questions.narrow(10).array.length, 10);
});
it('#narrow(0).array returns an empty array', function() {
let result = questions.narrow(0).array;
assert.equal(result.constructor, Array);
assert.equal(result.length, 0);
});
it('#narrow(1000).array returns all questions', function() {
let length = questions.array.length;
assert.equal(questions.narrow(1000).array.length, length);
});
it('#noanswers leaves no answer properties', function() {
let result = questions.noanswers.array;
for (var i in result) {
assert.equal(result[i].answer, undefined);
}
});
});
## Instruction:
Change test for better coverage
## Code After:
var assert = require('assert');
describe('Question set', function () {
let QuestionSet = require('../src/QuestionSet');
let questions;
beforeEach(function () {
questions = new QuestionSet();
});
afterEach(function() {
delete questions;
});
it('can be chained', function() {
assert.equal(questions, questions.narrow());
assert.equal(questions, questions.noanswers);
});
it('#array returns valid question objects', function() {
questions.forEach((question) => {
assert.notEqual(question.text, undefined);
assert.notEqual(question.answer, undefined);
assert.notEqual(question.choice_a, undefined);
});
});
it('#narrow(10).array returns a 10 element array', function() {
assert.equal(questions.narrow(10).array.length, 10);
});
it('#narrow(0).array returns an empty array', function() {
let result = questions.narrow(0).array;
assert.equal(result.constructor, Array);
assert.equal(result.length, 0);
});
it('#narrow(1000).array returns all questions', function() {
let length = questions.array.length;
assert.equal(questions.narrow(1000).array.length, length);
});
it('#noanswers leaves no answer properties', function() {
let result = questions.noanswers.array;
for (var i in result) {
assert.equal(result[i].answer, undefined);
}
});
});
|
e8b8c257c71b6c02fa691557618261e6832fba94 | faker/providers/ssn/uk_UA/__init__.py | faker/providers/ssn/uk_UA/__init__.py | from __future__ import unicode_literals
from .. import Provider as SsnProvider
# Note: as there no SSN in Ukraine
# we get value added tax identification number (VATIN) here.
# It is also called "Ідентифікаційний номер платника податків" (in ukrainian).
# It contains only digits and length if 12.
class Provider(SsnProvider):
ssn_formats = ("############",)
| from __future__ import unicode_literals
from datetime import date
from .. import Provider as SsnProvider
from faker.providers.date_time import Provider as DateTimeProvider
class Provider(SsnProvider):
@classmethod
def ssn(cls):
"""
Ukrainian "Реєстраційний номер облікової картки платника податків"
also known as "Ідентифікаційний номер фізичної особи".
"""
digits = []
# Number of days between 1899-12-31 and a birth date
for digit in str((DateTimeProvider.date_object() -
date(1899, 12, 31)).days):
digits.append(int(digit))
# Person's sequence number
for _ in range(4):
digits.append(cls.random_int(0, 9))
checksum = (digits[0]*-1 + digits[1]*5 + digits[2]*7 + digits[3]*9 +
digits[4]*4 + digits[5]*6 + digits[6]*10 + digits[7]*5 +
digits[8]*7)
# Remainder of a checksum divided by 11 or 1 if it equals to 10
digits.append(checksum % 11 % 10)
return ''.join(str(digit) for digit in digits)
| Make the Ukrainian SSN provider realer | Make the Ukrainian SSN provider realer
| Python | mit | joke2k/faker,danhuss/faker,trtd/faker,joke2k/faker | python | ## Code Before:
from __future__ import unicode_literals
from .. import Provider as SsnProvider
# Note: as there no SSN in Ukraine
# we get value added tax identification number (VATIN) here.
# It is also called "Ідентифікаційний номер платника податків" (in ukrainian).
# It contains only digits and length if 12.
class Provider(SsnProvider):
ssn_formats = ("############",)
## Instruction:
Make the Ukrainian SSN provider realer
## Code After:
from __future__ import unicode_literals
from datetime import date
from .. import Provider as SsnProvider
from faker.providers.date_time import Provider as DateTimeProvider
class Provider(SsnProvider):
@classmethod
def ssn(cls):
"""
Ukrainian "Реєстраційний номер облікової картки платника податків"
also known as "Ідентифікаційний номер фізичної особи".
"""
digits = []
# Number of days between 1899-12-31 and a birth date
for digit in str((DateTimeProvider.date_object() -
date(1899, 12, 31)).days):
digits.append(int(digit))
# Person's sequence number
for _ in range(4):
digits.append(cls.random_int(0, 9))
checksum = (digits[0]*-1 + digits[1]*5 + digits[2]*7 + digits[3]*9 +
digits[4]*4 + digits[5]*6 + digits[6]*10 + digits[7]*5 +
digits[8]*7)
# Remainder of a checksum divided by 11 or 1 if it equals to 10
digits.append(checksum % 11 % 10)
return ''.join(str(digit) for digit in digits)
|
1365ea0a1412ae345c56e19fe8f581e82ce93b88 | stagemonitor-core/src/main/java/org/stagemonitor/core/instrument/AbstractClassPathScanner.java | stagemonitor-core/src/main/java/org/stagemonitor/core/instrument/AbstractClassPathScanner.java | package org.stagemonitor.core.instrument;
import net.bytebuddy.agent.builder.AgentBuilder;
import net.bytebuddy.description.method.MethodDescription;
import net.bytebuddy.description.type.TypeDescription;
/**
* This transformer does not modify classes but only searches for matching {@link TypeDescription} and {@link MethodDescription}s
*/
public abstract class AbstractClassPathScanner extends StagemonitorByteBuddyTransformer {
@Override
public AgentBuilder.Transformer getTransformer() {
return AgentBuilder.Transformer.NoOp.INSTANCE;
}
@Override
public void beforeTransformation(TypeDescription typeDescription, ClassLoader classLoader) {
onTypeMatch(typeDescription);
}
protected void onTypeMatch(TypeDescription typeDescription) {
for (MethodDescription.InDefinedShape methodDescription : typeDescription.getDeclaredMethods()
.filter(getMethodElementMatcher())) {
onMethodMatch(methodDescription);
}
}
protected abstract void onMethodMatch(MethodDescription.InDefinedShape methodDescription);
}
| package org.stagemonitor.core.instrument;
import net.bytebuddy.agent.builder.AgentBuilder;
import net.bytebuddy.description.method.MethodDescription;
import net.bytebuddy.description.type.TypeDescription;
import net.bytebuddy.dynamic.DynamicType;
/**
* This transformer does not modify classes but only searches for matching {@link TypeDescription} and {@link MethodDescription}s
*/
public abstract class AbstractClassPathScanner extends StagemonitorByteBuddyTransformer {
@Override
public AgentBuilder.Transformer getTransformer() {
return new AgentBuilder.Transformer() {
@Override
public DynamicType.Builder<?> transform(DynamicType.Builder<?> builder, TypeDescription typeDescription, ClassLoader classLoader) {
onTypeMatch(typeDescription);
return builder;
}
};
}
protected void onTypeMatch(TypeDescription typeDescription) {
for (MethodDescription.InDefinedShape methodDescription : typeDescription.getDeclaredMethods()
.filter(getMethodElementMatcher())) {
onMethodMatch(methodDescription);
}
}
protected abstract void onMethodMatch(MethodDescription.InDefinedShape methodDescription);
}
| Fix Sla class path scanner | Fix Sla class path scanner
| Java | apache-2.0 | elevennl/stagemonitor,hexdecteam/stagemonitor,elevennl/stagemonitor,glamarre360/stagemonitor,stagemonitor/stagemonitor,stagemonitor/stagemonitor,stagemonitor/stagemonitor,trampi/stagemonitor,glamarre360/stagemonitor,hexdecteam/stagemonitor,hexdecteam/stagemonitor,glamarre360/stagemonitor,elevennl/stagemonitor,trampi/stagemonitor,hexdecteam/stagemonitor,trampi/stagemonitor,glamarre360/stagemonitor,stagemonitor/stagemonitor,trampi/stagemonitor,elevennl/stagemonitor | java | ## Code Before:
package org.stagemonitor.core.instrument;
import net.bytebuddy.agent.builder.AgentBuilder;
import net.bytebuddy.description.method.MethodDescription;
import net.bytebuddy.description.type.TypeDescription;
/**
* This transformer does not modify classes but only searches for matching {@link TypeDescription} and {@link MethodDescription}s
*/
public abstract class AbstractClassPathScanner extends StagemonitorByteBuddyTransformer {
@Override
public AgentBuilder.Transformer getTransformer() {
return AgentBuilder.Transformer.NoOp.INSTANCE;
}
@Override
public void beforeTransformation(TypeDescription typeDescription, ClassLoader classLoader) {
onTypeMatch(typeDescription);
}
protected void onTypeMatch(TypeDescription typeDescription) {
for (MethodDescription.InDefinedShape methodDescription : typeDescription.getDeclaredMethods()
.filter(getMethodElementMatcher())) {
onMethodMatch(methodDescription);
}
}
protected abstract void onMethodMatch(MethodDescription.InDefinedShape methodDescription);
}
## Instruction:
Fix Sla class path scanner
## Code After:
package org.stagemonitor.core.instrument;
import net.bytebuddy.agent.builder.AgentBuilder;
import net.bytebuddy.description.method.MethodDescription;
import net.bytebuddy.description.type.TypeDescription;
import net.bytebuddy.dynamic.DynamicType;
/**
* This transformer does not modify classes but only searches for matching {@link TypeDescription} and {@link MethodDescription}s
*/
public abstract class AbstractClassPathScanner extends StagemonitorByteBuddyTransformer {
@Override
public AgentBuilder.Transformer getTransformer() {
return new AgentBuilder.Transformer() {
@Override
public DynamicType.Builder<?> transform(DynamicType.Builder<?> builder, TypeDescription typeDescription, ClassLoader classLoader) {
onTypeMatch(typeDescription);
return builder;
}
};
}
protected void onTypeMatch(TypeDescription typeDescription) {
for (MethodDescription.InDefinedShape methodDescription : typeDescription.getDeclaredMethods()
.filter(getMethodElementMatcher())) {
onMethodMatch(methodDescription);
}
}
protected abstract void onMethodMatch(MethodDescription.InDefinedShape methodDescription);
}
|
bd1432aa6951b1610a816cc02d48d8926491ba04 | lib/pronto/formatter/github_formatter.rb | lib/pronto/formatter/github_formatter.rb | require 'octokit'
module Pronto
module Formatter
class GithubFormatter
attr_writer :client
def format(messages)
messages.each do |message|
@client.create_commit_comment(github_slug(message),
sha(message),
message.msg,
message.path,
message.line.new_lineno)
end
"#{messages.count} pronto messages posted to GitHub"
end
private
def github_slug(message)
message.repo.remotes.map(&:github_slug).compact.first
end
def sha(message)
blamelines = blame(message).lines
lineno = message.line.new_lineno
blameline = blamelines.detect { |line| line.lineno == lineno }
blameline.commit.id if blameline
end
def blame(message)
@blames ||= {}
@blames[message.path] ||= message.repo.blame(message.path)
@blames[message.path]
end
end
end
end
| require 'octokit'
module Pronto
module Formatter
class GithubFormatter
attr_writer :client
def format(messages)
messages.each do |message|
@client.create_commit_comment(github_slug(message),
sha(message),
message.msg,
message.path,
message.line.new_lineno)
end
end
private
def github_slug(message)
message.repo.remotes.map(&:github_slug).compact.first
end
def sha(message)
blamelines = blame(message).lines
lineno = message.line.new_lineno
blameline = blamelines.detect { |line| line.lineno == lineno }
blameline.commit.id if blameline
end
def blame(message)
@blames ||= {}
@blames[message.path] ||= message.repo.blame(message.path)
@blames[message.path]
end
end
end
end
| Remove output message in GitHub formatter | Remove output message in GitHub formatter
| Ruby | mit | HaiTo/pronto,jhass/pronto,mmozuras/pronto,mvz/pronto,prontolabs/pronto,treble37/pronto,aergonaut/pronto,gussan/pronto,Zauberstuhl/pronto | ruby | ## Code Before:
require 'octokit'
module Pronto
module Formatter
class GithubFormatter
attr_writer :client
def format(messages)
messages.each do |message|
@client.create_commit_comment(github_slug(message),
sha(message),
message.msg,
message.path,
message.line.new_lineno)
end
"#{messages.count} pronto messages posted to GitHub"
end
private
def github_slug(message)
message.repo.remotes.map(&:github_slug).compact.first
end
def sha(message)
blamelines = blame(message).lines
lineno = message.line.new_lineno
blameline = blamelines.detect { |line| line.lineno == lineno }
blameline.commit.id if blameline
end
def blame(message)
@blames ||= {}
@blames[message.path] ||= message.repo.blame(message.path)
@blames[message.path]
end
end
end
end
## Instruction:
Remove output message in GitHub formatter
## Code After:
require 'octokit'
module Pronto
module Formatter
class GithubFormatter
attr_writer :client
def format(messages)
messages.each do |message|
@client.create_commit_comment(github_slug(message),
sha(message),
message.msg,
message.path,
message.line.new_lineno)
end
end
private
def github_slug(message)
message.repo.remotes.map(&:github_slug).compact.first
end
def sha(message)
blamelines = blame(message).lines
lineno = message.line.new_lineno
blameline = blamelines.detect { |line| line.lineno == lineno }
blameline.commit.id if blameline
end
def blame(message)
@blames ||= {}
@blames[message.path] ||= message.repo.blame(message.path)
@blames[message.path]
end
end
end
end
|
c226c7f60018c97e9d5abbbf69f242cb39e20833 | .travis.yml | .travis.yml | language: ruby
rvm:
- 2.1.10
- 2.2.7
- 2.3.4
- 2.4.1
before_install: gem install bundler
cache: bundler
sudo: false
fast_finish: true
env:
- SKIP_INTEGRATION_TESTS=true
notifications:
email:
on_success: always
on_failure: always
| language: ruby
rvm:
- 2.3.4
- 2.4.1
before_install: gem install bundler
cache: bundler
sudo: false
fast_finish: true
env:
- SKIP_INTEGRATION_TESTS=true
notifications:
email:
on_success: always
on_failure: always
| Remove ruby versions 2.1 and 2.2, we require 2.3+ | Remove ruby versions 2.1 and 2.2, we require 2.3+
| YAML | mit | javierjulio/optimal_payments,javierjulio/paysafe,javierjulio/paysafe,javierjulio/optimal_payments | yaml | ## Code Before:
language: ruby
rvm:
- 2.1.10
- 2.2.7
- 2.3.4
- 2.4.1
before_install: gem install bundler
cache: bundler
sudo: false
fast_finish: true
env:
- SKIP_INTEGRATION_TESTS=true
notifications:
email:
on_success: always
on_failure: always
## Instruction:
Remove ruby versions 2.1 and 2.2, we require 2.3+
## Code After:
language: ruby
rvm:
- 2.3.4
- 2.4.1
before_install: gem install bundler
cache: bundler
sudo: false
fast_finish: true
env:
- SKIP_INTEGRATION_TESTS=true
notifications:
email:
on_success: always
on_failure: always
|
6ec3f74b9aa3996662901d9af7dcb0ee9e55deef | package.json | package.json | {
"name": "esparse",
"version": "0.6.5",
"description": "An ECMAScript Parser",
"homepage": "https://github.com/zenparsing/esparse",
"main": "dist/default.js",
"module": "src/default.js",
"devDependencies": {
"babel-cli": "^6.26.0",
"babel-plugin-transform-es2015-modules-commonjs": "^6.26.2",
"eslint": "^4.19.1"
},
"scripts": {
"lint": "eslint src/*",
"test": "npm run build && node test",
"build": "babel src --out-dir dist --plugins=transform-es2015-modules-commonjs",
"prepublishOnly": "npm run lint && npm test"
},
"dependencies": {}
}
| {
"name": "esparse",
"version": "0.6.5",
"description": "An ECMAScript Parser",
"homepage": "https://github.com/zenparsing/esparse",
"main": "dist/default.js",
"module": "src/default.js",
"devDependencies": {
"babel-cli": "^6.26.0",
"babel-plugin-transform-es2015-modules-commonjs": "^6.26.2",
"eslint": "^4.19.1"
},
"scripts": {
"lint": "eslint src/*",
"test": "npm run build && node test",
"build": "git clean -dfX ./dist && babel src --out-dir dist --plugins=transform-es2015-modules-commonjs",
"prepublishOnly": "npm run lint && npm test"
},
"dependencies": {}
}
| Add git clean to build script | Add git clean to build script
| JSON | mit | zenparsing/esparse | json | ## Code Before:
{
"name": "esparse",
"version": "0.6.5",
"description": "An ECMAScript Parser",
"homepage": "https://github.com/zenparsing/esparse",
"main": "dist/default.js",
"module": "src/default.js",
"devDependencies": {
"babel-cli": "^6.26.0",
"babel-plugin-transform-es2015-modules-commonjs": "^6.26.2",
"eslint": "^4.19.1"
},
"scripts": {
"lint": "eslint src/*",
"test": "npm run build && node test",
"build": "babel src --out-dir dist --plugins=transform-es2015-modules-commonjs",
"prepublishOnly": "npm run lint && npm test"
},
"dependencies": {}
}
## Instruction:
Add git clean to build script
## Code After:
{
"name": "esparse",
"version": "0.6.5",
"description": "An ECMAScript Parser",
"homepage": "https://github.com/zenparsing/esparse",
"main": "dist/default.js",
"module": "src/default.js",
"devDependencies": {
"babel-cli": "^6.26.0",
"babel-plugin-transform-es2015-modules-commonjs": "^6.26.2",
"eslint": "^4.19.1"
},
"scripts": {
"lint": "eslint src/*",
"test": "npm run build && node test",
"build": "git clean -dfX ./dist && babel src --out-dir dist --plugins=transform-es2015-modules-commonjs",
"prepublishOnly": "npm run lint && npm test"
},
"dependencies": {}
}
|
174bf1ed4e9f1d49419a1bb305cbf7fb8f0d9093 | web-interface/web-interface-client/src/main/js/init.js | web-interface/web-interface-client/src/main/js/init.js |
(function() {
var setup = false;
function ready() {
if (setup) {
return;
}
// TODO: Init
setup = true;
}
if (document.readyState === 'complete') {
setTimeout(ready, 0);
}
else if (document.addEventListener) {
document.addEventListener('DOMContentLoaded', ready, false);
window.addEventListener('load', ready, false);
}
else {
document.attachEvent('onreadystatechange', readyStateChange);
window.attachEvent('onload', ready);
}
})();
|
(function() {
var diffusion = require('diffusion');
function start() {
console.log('Ready');
session
.stream('?adapter/rest/model/store/')
.asType(diffusion.datatypes.json())
.on('value', function(path, specification, newValue, oldValue) {
console.log(path, newValue);
});
}
var started = false;
function tryStart() {
if (!started && setup && connected) {
start();
started = true;
}
}
var connected = false;
var session;
function connect() {
diffusion.connect({
host : 'localhost',
port : 8080
}).then(function (newSession) {
connected = true;
session = newSession;
session.subscribe('?adapter/rest/model/store/');
tryStart();
}, function() {
setTimeout(connect, 5000);
});
}
var setup = false;
function ready() {
if (setup) {
return;
}
tryStart();
setup = true;
}
if (document.readyState === 'complete') {
setTimeout(ready, 0);
}
else if (document.addEventListener) {
document.addEventListener('DOMContentLoaded', ready, false);
window.addEventListener('load', ready, false);
}
else {
document.attachEvent('onreadystatechange', readyStateChange);
window.attachEvent('onload', ready);
}
connect();
})();
| Connect the web interface to Diffusion. | Connect the web interface to Diffusion.
| JavaScript | apache-2.0 | pushtechnology/diffusion-rest-adapter,pushtechnology/diffusion-rest-adapter | javascript | ## Code Before:
(function() {
var setup = false;
function ready() {
if (setup) {
return;
}
// TODO: Init
setup = true;
}
if (document.readyState === 'complete') {
setTimeout(ready, 0);
}
else if (document.addEventListener) {
document.addEventListener('DOMContentLoaded', ready, false);
window.addEventListener('load', ready, false);
}
else {
document.attachEvent('onreadystatechange', readyStateChange);
window.attachEvent('onload', ready);
}
})();
## Instruction:
Connect the web interface to Diffusion.
## Code After:
(function() {
var diffusion = require('diffusion');
function start() {
console.log('Ready');
session
.stream('?adapter/rest/model/store/')
.asType(diffusion.datatypes.json())
.on('value', function(path, specification, newValue, oldValue) {
console.log(path, newValue);
});
}
var started = false;
function tryStart() {
if (!started && setup && connected) {
start();
started = true;
}
}
var connected = false;
var session;
function connect() {
diffusion.connect({
host : 'localhost',
port : 8080
}).then(function (newSession) {
connected = true;
session = newSession;
session.subscribe('?adapter/rest/model/store/');
tryStart();
}, function() {
setTimeout(connect, 5000);
});
}
var setup = false;
function ready() {
if (setup) {
return;
}
tryStart();
setup = true;
}
if (document.readyState === 'complete') {
setTimeout(ready, 0);
}
else if (document.addEventListener) {
document.addEventListener('DOMContentLoaded', ready, false);
window.addEventListener('load', ready, false);
}
else {
document.attachEvent('onreadystatechange', readyStateChange);
window.attachEvent('onload', ready);
}
connect();
})();
|
ed3597688b01f666ec716108bf7c348338ef14af | index.js | index.js | 'use strict'
var detect = require('acorn-globals');
var lastSRC = '(null)';
var lastRes = true;
var lastConstants = undefined;
module.exports = isConstant;
function isConstant(src, constants) {
src = '(' + src + ')';
if (lastSRC === src && lastConstants === constants) return lastRes;
lastSRC = src;
lastConstants = constants;
try {
Function('return (' + src + ')');
return lastRes = (detect(src).filter(function (key) {
return !constants || !(key.name in constants);
}).length === 0);
} catch (ex) {
return lastRes = false;
}
}
isConstant.isConstant = isConstant;
isConstant.toConstant = toConstant;
function toConstant(src, constants) {
if (!isConstant(src, constants)) throw new Error(JSON.stringify(src) + ' is not constant.');
return Function(Object.keys(constants || {}).join(','), 'return (' + src + ')').apply(null, Object.keys(constants || {}).map(function (key) {
return constants[key];
}));
}
| 'use strict'
var detect = require('acorn-globals');
var lastSRC = '(null)';
var lastRes = true;
var lastConstants = undefined;
module.exports = isConstant;
function isConstant(src, constants) {
src = '(' + src + ')';
if (lastSRC === src && lastConstants === constants) return lastRes;
lastSRC = src;
lastConstants = constants;
try {
isExpression(src);
return lastRes = (detect(src).filter(function (key) {
return !constants || !(key.name in constants);
}).length === 0);
} catch (ex) {
return lastRes = false;
}
}
isConstant.isConstant = isConstant;
isConstant.toConstant = toConstant;
function toConstant(src, constants) {
if (!isConstant(src, constants)) throw new Error(JSON.stringify(src) + ' is not constant.');
return Function(Object.keys(constants || {}).join(','), 'return (' + src + ')').apply(null, Object.keys(constants || {}).map(function (key) {
return constants[key];
}));
}
function isExpression(src) {
try {
eval('throw "STOP"; (function () { return (' + src + '); })()');
return false;
}
catch (err) {
return err === 'STOP';
}
}
| Use a safer test for isExpression | Use a safer test for isExpression
It is now safe to use `isConstant` on un-trusted input, but it is still
not safe to use `toConstant` on un-trusted input.
| JavaScript | mit | ForbesLindesay/constantinople | javascript | ## Code Before:
'use strict'
var detect = require('acorn-globals');
var lastSRC = '(null)';
var lastRes = true;
var lastConstants = undefined;
module.exports = isConstant;
function isConstant(src, constants) {
src = '(' + src + ')';
if (lastSRC === src && lastConstants === constants) return lastRes;
lastSRC = src;
lastConstants = constants;
try {
Function('return (' + src + ')');
return lastRes = (detect(src).filter(function (key) {
return !constants || !(key.name in constants);
}).length === 0);
} catch (ex) {
return lastRes = false;
}
}
isConstant.isConstant = isConstant;
isConstant.toConstant = toConstant;
function toConstant(src, constants) {
if (!isConstant(src, constants)) throw new Error(JSON.stringify(src) + ' is not constant.');
return Function(Object.keys(constants || {}).join(','), 'return (' + src + ')').apply(null, Object.keys(constants || {}).map(function (key) {
return constants[key];
}));
}
## Instruction:
Use a safer test for isExpression
It is now safe to use `isConstant` on un-trusted input, but it is still
not safe to use `toConstant` on un-trusted input.
## Code After:
'use strict'
var detect = require('acorn-globals');
var lastSRC = '(null)';
var lastRes = true;
var lastConstants = undefined;
module.exports = isConstant;
function isConstant(src, constants) {
src = '(' + src + ')';
if (lastSRC === src && lastConstants === constants) return lastRes;
lastSRC = src;
lastConstants = constants;
try {
isExpression(src);
return lastRes = (detect(src).filter(function (key) {
return !constants || !(key.name in constants);
}).length === 0);
} catch (ex) {
return lastRes = false;
}
}
isConstant.isConstant = isConstant;
isConstant.toConstant = toConstant;
function toConstant(src, constants) {
if (!isConstant(src, constants)) throw new Error(JSON.stringify(src) + ' is not constant.');
return Function(Object.keys(constants || {}).join(','), 'return (' + src + ')').apply(null, Object.keys(constants || {}).map(function (key) {
return constants[key];
}));
}
function isExpression(src) {
try {
eval('throw "STOP"; (function () { return (' + src + '); })()');
return false;
}
catch (err) {
return err === 'STOP';
}
}
|
279ecd20dec094fc33440c8fc27af4501c9a0ede | README.md | README.md | Simple danmaku game and engine. WIP.
[Info] (https://titanpad.com/l0Vjfgw4UH)
| Simple danmaku game and engine. WIP.
[Info] (https://titanpad.com/l0Vjfgw4UH)
# Engine Internals
The engine is based on a single threaded entity-component system with reactive entities.
All entities interact with the engine via events passed to them, and by creating and using preprovided components the engine supplies.
The engine's main loop functions as such:
* Create a new cleared frame.
* Pass the Render event to all entities.
* Write instanced sprite data to the frame.
* Finish the frame.
* Poll the window for events(keyboard presses, mouse movement, etc.), convert them to internal events and dispatch them to all subscribed entities.
* Using time stored in an accumulator, dispatch an Update event to all entities, and update the physics aspect of the world, dispatching Collision/Proximity events as needed.
* Repeat.
| Add some info about the engine | Add some info about the engine
| Markdown | isc | Luminarys/Nishikaku | markdown | ## Code Before:
Simple danmaku game and engine. WIP.
[Info] (https://titanpad.com/l0Vjfgw4UH)
## Instruction:
Add some info about the engine
## Code After:
Simple danmaku game and engine. WIP.
[Info] (https://titanpad.com/l0Vjfgw4UH)
# Engine Internals
The engine is based on a single threaded entity-component system with reactive entities.
All entities interact with the engine via events passed to them, and by creating and using preprovided components the engine supplies.
The engine's main loop functions as such:
* Create a new cleared frame.
* Pass the Render event to all entities.
* Write instanced sprite data to the frame.
* Finish the frame.
* Poll the window for events(keyboard presses, mouse movement, etc.), convert them to internal events and dispatch them to all subscribed entities.
* Using time stored in an accumulator, dispatch an Update event to all entities, and update the physics aspect of the world, dispatching Collision/Proximity events as needed.
* Repeat.
|
4cf8c5ea88887a167c216a16afb9605963564d73 | frontend/src/app/routes.js | frontend/src/app/routes.js | import React from "react";
import {IndexRedirect, Route} from "react-router";
import Admin from "app/layouts/Admin";
import users from "app/users/routes";
export default (
<Route path="/">
<IndexRedirect to="admin/users"/>
<Route component={Admin} path="admin">
<IndexRedirect to="users"/>
{users}
</Route>
</Route>
);
| import React from "react";
import {IndexRedirect, Route} from "react-router";
import Admin from "app/layouts/Admin";
import RouteNotFound from "app/components/RouteNotFound";
import users from "app/users/routes";
export default (
<Route path="/">
<IndexRedirect to="admin/users"/>
<Route component={Admin} path="admin">
<IndexRedirect to="users"/>
{users}
<Route path="*" component={RouteNotFound}/>
</Route>
</Route>
);
| Add route that catches 404's | Add route that catches 404's
| JavaScript | mit | scottwoodall/django-react-template,scottwoodall/django-react-template,scottwoodall/django-react-template | javascript | ## Code Before:
import React from "react";
import {IndexRedirect, Route} from "react-router";
import Admin from "app/layouts/Admin";
import users from "app/users/routes";
export default (
<Route path="/">
<IndexRedirect to="admin/users"/>
<Route component={Admin} path="admin">
<IndexRedirect to="users"/>
{users}
</Route>
</Route>
);
## Instruction:
Add route that catches 404's
## Code After:
import React from "react";
import {IndexRedirect, Route} from "react-router";
import Admin from "app/layouts/Admin";
import RouteNotFound from "app/components/RouteNotFound";
import users from "app/users/routes";
export default (
<Route path="/">
<IndexRedirect to="admin/users"/>
<Route component={Admin} path="admin">
<IndexRedirect to="users"/>
{users}
<Route path="*" component={RouteNotFound}/>
</Route>
</Route>
);
|
cbf929904f63caf1f65e168bf77f564fcaab7184 | lib/assets/javascripts/cartodb3/editor/style/style-form/style-properties-form/style-shape-properties-form-model.js | lib/assets/javascripts/cartodb3/editor/style/style-form/style-properties-form/style-shape-properties-form-model.js | var _ = require('underscore');
var StylesFactory = require('../../styles-factory');
var StyleFormDefaultModel = require('../style-form-default-model');
module.exports = StyleFormDefaultModel.extend({
parse: function (r) {
var geom = r.geom;
var attrs = {
fill: r.fill,
stroke: r.stroke,
blending: r.blending,
resolution: r.resolution
};
var isAggregatedType = _.contains(StylesFactory.getAggregationTypes(), r.type);
if (isAggregatedType || (geom && geom.getSimpleType() === 'polygon')) {
delete attrs.fill.size;
}
if (geom && geom.getSimpleType() === 'line') {
delete attrs.fill;
}
if (r.type === 'heatmap') {
attrs = _.omit(attrs, 'stroke', 'blending');
} else {
attrs = _.omit(attrs, 'resolution');
}
return attrs;
},
_onChange: function () {
this._styleModel.set(_.clone(this.attributes));
}
});
| var _ = require('underscore');
var StylesFactory = require('../../styles-factory');
var StyleFormDefaultModel = require('../style-form-default-model');
module.exports = StyleFormDefaultModel.extend({
parse: function (r) {
var geom = r.geom;
var attrs = {
fill: r.fill,
stroke: r.stroke,
blending: r.blending,
resolution: r.resolution
};
var isAggregatedType = _.contains(StylesFactory.getAggregationTypes(), r.type);
if (isAggregatedType || (geom && geom.getSimpleType() === 'polygon')) {
if (attrs.fill.size) {
attrs.fill = _.omit(attrs.fill, 'size');
}
}
if (geom && geom.getSimpleType() === 'line') {
attrs = _.omit(attrs.fill);
}
if (r.type === 'heatmap') {
attrs = _.omit(attrs, 'stroke', 'blending');
} else {
attrs = _.omit(attrs, 'resolution');
}
return attrs;
},
_onChange: function () {
this._styleModel.set(_.clone(this.attributes));
}
});
| Create a new object instead of modifying original | Create a new object instead of modifying original
Fixing the root problem will require more severe changes, basically
avoiding passing objects by reference to prevent this kind of bugs.
| JavaScript | bsd-3-clause | CartoDB/cartodb,splashblot/dronedb,splashblot/dronedb,CartoDB/cartodb,CartoDB/cartodb,CartoDB/cartodb,splashblot/dronedb,CartoDB/cartodb,splashblot/dronedb,splashblot/dronedb | javascript | ## Code Before:
var _ = require('underscore');
var StylesFactory = require('../../styles-factory');
var StyleFormDefaultModel = require('../style-form-default-model');
module.exports = StyleFormDefaultModel.extend({
parse: function (r) {
var geom = r.geom;
var attrs = {
fill: r.fill,
stroke: r.stroke,
blending: r.blending,
resolution: r.resolution
};
var isAggregatedType = _.contains(StylesFactory.getAggregationTypes(), r.type);
if (isAggregatedType || (geom && geom.getSimpleType() === 'polygon')) {
delete attrs.fill.size;
}
if (geom && geom.getSimpleType() === 'line') {
delete attrs.fill;
}
if (r.type === 'heatmap') {
attrs = _.omit(attrs, 'stroke', 'blending');
} else {
attrs = _.omit(attrs, 'resolution');
}
return attrs;
},
_onChange: function () {
this._styleModel.set(_.clone(this.attributes));
}
});
## Instruction:
Create a new object instead of modifying original
Fixing the root problem will require more severe changes, basically
avoiding passing objects by reference to prevent this kind of bugs.
## Code After:
var _ = require('underscore');
var StylesFactory = require('../../styles-factory');
var StyleFormDefaultModel = require('../style-form-default-model');
module.exports = StyleFormDefaultModel.extend({
parse: function (r) {
var geom = r.geom;
var attrs = {
fill: r.fill,
stroke: r.stroke,
blending: r.blending,
resolution: r.resolution
};
var isAggregatedType = _.contains(StylesFactory.getAggregationTypes(), r.type);
if (isAggregatedType || (geom && geom.getSimpleType() === 'polygon')) {
if (attrs.fill.size) {
attrs.fill = _.omit(attrs.fill, 'size');
}
}
if (geom && geom.getSimpleType() === 'line') {
attrs = _.omit(attrs.fill);
}
if (r.type === 'heatmap') {
attrs = _.omit(attrs, 'stroke', 'blending');
} else {
attrs = _.omit(attrs, 'resolution');
}
return attrs;
},
_onChange: function () {
this._styleModel.set(_.clone(this.attributes));
}
});
|
09ecbafd75e8b90b34b201826b16741883ab6dad | README.md | README.md |
Rerun last command with different command line options
### Installation
```sh
ln -siv `pwd`/r /usr/local/bin
```
### Usage
```sh
$ ls a b c
$ r -G
-> ls -G a b c
```
|
Rerun last command with different command line options
### Installation
```sh
ln -siv `pwd`/r /usr/local/bin
```
### Usage
```sh
$ ls a b c
$ r -G
-> ls -G a b c
```
### Longer Explanation
Commands in the terminal are usually called with the following syntax:
```
$ builtin [-options] [args ...]
```
These commands are executed in the running shell's process. If a command contains
a slash (`/`), the shell does not execute the builtin program (which is contained
in `$PATH`). The purpose of this script is to easily rerun a program (either
a builtin or an specified executable) with a different set of options than
initially specified.
| Add a bit longer description | Add a bit longer description
| Markdown | mit | jasonkliu/redo | markdown | ## Code Before:
Rerun last command with different command line options
### Installation
```sh
ln -siv `pwd`/r /usr/local/bin
```
### Usage
```sh
$ ls a b c
$ r -G
-> ls -G a b c
```
## Instruction:
Add a bit longer description
## Code After:
Rerun last command with different command line options
### Installation
```sh
ln -siv `pwd`/r /usr/local/bin
```
### Usage
```sh
$ ls a b c
$ r -G
-> ls -G a b c
```
### Longer Explanation
Commands in the terminal are usually called with the following syntax:
```
$ builtin [-options] [args ...]
```
These commands are executed in the running shell's process. If a command contains
a slash (`/`), the shell does not execute the builtin program (which is contained
in `$PATH`). The purpose of this script is to easily rerun a program (either
a builtin or an specified executable) with a different set of options than
initially specified.
|
c3201037ef44d9e3074a8adbcedd30ea5436f85f | tox.ini | tox.ini | [tox]
envlist = py{27,34,35}-dj{18,19}
[testenv]
deps =
dj18: django==1.8
dj19: django==1.9
py27: mock
factory_boy
coverage
commands =
coverage run -a --rcfile={toxinidir}/.coveragerc setup.py test
| [tox]
envlist = py{27,34,35,36}-dj{18,19,110}
[testenv]
deps =
dj18: django==1.8
dj19: django==1.9
dj110: django==1.10
py27: mock
factory_boy
coverage
commands =
coverage run -a --rcfile={toxinidir}/.coveragerc setup.py test
| Add Python 3.6+Django 1.10 to build | Add Python 3.6+Django 1.10 to build
| INI | mit | kako-nawao/django-group-by | ini | ## Code Before:
[tox]
envlist = py{27,34,35}-dj{18,19}
[testenv]
deps =
dj18: django==1.8
dj19: django==1.9
py27: mock
factory_boy
coverage
commands =
coverage run -a --rcfile={toxinidir}/.coveragerc setup.py test
## Instruction:
Add Python 3.6+Django 1.10 to build
## Code After:
[tox]
envlist = py{27,34,35,36}-dj{18,19,110}
[testenv]
deps =
dj18: django==1.8
dj19: django==1.9
dj110: django==1.10
py27: mock
factory_boy
coverage
commands =
coverage run -a --rcfile={toxinidir}/.coveragerc setup.py test
|
9031bfef780b27ecb3a93136e62197ab044a5ac7 | README.md | README.md |
[](https://codeclimate.com/github/gitlabhq/gitlab-ci-runner)

## This is Runner repository. This code responsible for running tests
### Requirements
**The project is designed for the Linux operating system.**
We officially support (recent versions of) these Linux distributions:
- Ubuntu Linux
- Debian/GNU Linux
### Installation
```bash
# Get code
git clone https://github.com/gitlabhq/gitlab-ci-runner.git
# Enter code dir
cd gitlab-ci-runner
# Install dependencies
gem install bundler
bundle install
# Install runner in interactive mode
bundle exec ./bin/install
```
### Run
```bash
bundle exec ./bin/runner
```
|
[](https://codeclimate.com/github/gitlabhq/gitlab-ci-runner)

## This is Runner repository. This code responsible for running tests
### Requirements
**The project is designed for the Linux operating system.**
We officially support (recent versions of) these Linux distributions:
- Ubuntu Linux
- Debian/GNU Linux
### Installation
```bash
# Get code
git clone https://github.com/gitlabhq/gitlab-ci-runner.git
# Enter code dir
cd gitlab-ci-runner
# Install dependencies
gem install bundler
bundle install
# Install runner in interactive mode
bundle exec ./bin/install
# SSH into your GitLab server and confirm to add host key to known_hosts
ssh git@<your gitlab url>
```
### Run
```bash
bundle exec ./bin/runner
```
| Add extra step: SSH into GitLab to add host ID to known_hosts | Add extra step: SSH into GitLab to add host ID to known_hosts | Markdown | mit | cui-liqiang/gitlab-ci-runner,Wolfium/gitlab-ci-runner-centos,gitlabhq/gitlab-ci-runner,moos3/gitlab-ci-runner-centos,gitlabhq/gitlab-ci-runner,moos3/gitlab-ci-runner-centos,mfittko/gitlab-ci-runner,andrewmunsell/gitlab-ci-runner,Wolfium/gitlab-ci-runner-centos,mfittko/gitlab-ci-runner,cui-liqiang/gitlab-ci-runner,andrewmunsell/gitlab-ci-runner,myrubapa/gitlab-ci-runner-centos,myrubapa/gitlab-ci-runner-centos | markdown | ## Code Before:
[](https://codeclimate.com/github/gitlabhq/gitlab-ci-runner)

## This is Runner repository. This code responsible for running tests
### Requirements
**The project is designed for the Linux operating system.**
We officially support (recent versions of) these Linux distributions:
- Ubuntu Linux
- Debian/GNU Linux
### Installation
```bash
# Get code
git clone https://github.com/gitlabhq/gitlab-ci-runner.git
# Enter code dir
cd gitlab-ci-runner
# Install dependencies
gem install bundler
bundle install
# Install runner in interactive mode
bundle exec ./bin/install
```
### Run
```bash
bundle exec ./bin/runner
```
## Instruction:
Add extra step: SSH into GitLab to add host ID to known_hosts
## Code After:
[](https://codeclimate.com/github/gitlabhq/gitlab-ci-runner)

## This is Runner repository. This code responsible for running tests
### Requirements
**The project is designed for the Linux operating system.**
We officially support (recent versions of) these Linux distributions:
- Ubuntu Linux
- Debian/GNU Linux
### Installation
```bash
# Get code
git clone https://github.com/gitlabhq/gitlab-ci-runner.git
# Enter code dir
cd gitlab-ci-runner
# Install dependencies
gem install bundler
bundle install
# Install runner in interactive mode
bundle exec ./bin/install
# SSH into your GitLab server and confirm to add host key to known_hosts
ssh git@<your gitlab url>
```
### Run
```bash
bundle exec ./bin/runner
```
|
d8624f5fe754b3d3bcbb0836ecd9fdbb974b9ea1 | Library/Homebrew/unpack_strategy/executable.rb | Library/Homebrew/unpack_strategy/executable.rb | require_relative "uncompressed"
require "vendor/macho/macho"
module UnpackStrategy
class Executable < Uncompressed
def self.can_extract?(path:, magic_number:)
return true if magic_number.match?(/\A#!\s*\S+/n)
begin
path.file? && MachO.open(path).header.executable?
rescue MachO::NotAMachOError
false
end
end
end
end
| require_relative "uncompressed"
module UnpackStrategy
class Executable < Uncompressed
def self.can_extract?(path:, magic_number:)
magic_number.match?(/\A#!\s*\S+/n)
end
end
end
| Remove `MachO` check for `Executable`. | Remove `MachO` check for `Executable`.
| Ruby | bsd-2-clause | claui/brew,sjackman/homebrew,konqui/brew,nandub/brew,mahori/brew,vitorgalvao/brew,Linuxbrew/brew,reitermarkus/brew,EricFromCanada/brew,EricFromCanada/brew,JCount/brew,claui/brew,Homebrew/brew,JCount/brew,vitorgalvao/brew,MikeMcQuaid/brew,konqui/brew,maxim-belkin/brew,JCount/brew,vitorgalvao/brew,maxim-belkin/brew,Linuxbrew/brew,sjackman/homebrew,DomT4/brew,konqui/brew,sjackman/homebrew,konqui/brew,EricFromCanada/brew,reitermarkus/brew,claui/brew,EricFromCanada/brew,DomT4/brew,MikeMcQuaid/brew,nandub/brew,mahori/brew,claui/brew,Homebrew/brew,maxim-belkin/brew,mahori/brew,reitermarkus/brew,nandub/brew,Homebrew/brew,MikeMcQuaid/brew,vitorgalvao/brew,DomT4/brew,Linuxbrew/brew,DomT4/brew,JCount/brew,sjackman/homebrew,MikeMcQuaid/brew,nandub/brew,Linuxbrew/brew,reitermarkus/brew,mahori/brew,Homebrew/brew | ruby | ## Code Before:
require_relative "uncompressed"
require "vendor/macho/macho"
module UnpackStrategy
class Executable < Uncompressed
def self.can_extract?(path:, magic_number:)
return true if magic_number.match?(/\A#!\s*\S+/n)
begin
path.file? && MachO.open(path).header.executable?
rescue MachO::NotAMachOError
false
end
end
end
end
## Instruction:
Remove `MachO` check for `Executable`.
## Code After:
require_relative "uncompressed"
module UnpackStrategy
class Executable < Uncompressed
def self.can_extract?(path:, magic_number:)
magic_number.match?(/\A#!\s*\S+/n)
end
end
end
|
4fd0225ad318d05379d95c2184c4a78ed7fadcd8 | recipe-server/normandy/recipes/migrations/0045_update_action_hashes.py | recipe-server/normandy/recipes/migrations/0045_update_action_hashes.py | from __future__ import unicode_literals
import hashlib
from base64 import b64encode, urlsafe_b64encode
from django.db import migrations
def make_hashes_urlsafe_sri(apps, schema_editor):
Action = apps.get_model('recipes', 'Action')
for action in Action.objects.all():
data = action.implementation.encode()
digest = hashlib.sha384(data).digest()
data_hash = urlsafe_b64encode(digest)
action.implementation_hash = 'sha384-' + data_hash.decode()
action.save()
def make_hashes_sha1(apps, schema_editor):
Action = apps.get_model('recipes', 'Action')
for action in Action.objects.all():
data = action.implementation.encode()
data_hash = hashlib.sha1(data).hexdigest()
action.implementation_hash = data_hash
action.save()
class Migration(migrations.Migration):
dependencies = [
('recipes', '0044_auto_20170801_0010'),
]
operations = [
migrations.RunPython(make_hashes_urlsafe_sri, make_hashes_sha1),
]
| from __future__ import unicode_literals
import hashlib
from base64 import urlsafe_b64encode
from django.db import migrations
def make_hashes_urlsafe_sri(apps, schema_editor):
Action = apps.get_model('recipes', 'Action')
for action in Action.objects.all():
data = action.implementation.encode()
digest = hashlib.sha384(data).digest()
data_hash = urlsafe_b64encode(digest)
action.implementation_hash = 'sha384-' + data_hash.decode()
action.save()
def make_hashes_sha1(apps, schema_editor):
Action = apps.get_model('recipes', 'Action')
for action in Action.objects.all():
data = action.implementation.encode()
data_hash = hashlib.sha1(data).hexdigest()
action.implementation_hash = data_hash
action.save()
class Migration(migrations.Migration):
dependencies = [
('recipes', '0044_auto_20170801_0010'),
]
operations = [
migrations.RunPython(make_hashes_urlsafe_sri, make_hashes_sha1),
]
| Fix lint checks in migration recipes/0045. | Fix lint checks in migration recipes/0045.
| Python | mpl-2.0 | mozilla/normandy,mozilla/normandy,mozilla/normandy,mozilla/normandy | python | ## Code Before:
from __future__ import unicode_literals
import hashlib
from base64 import b64encode, urlsafe_b64encode
from django.db import migrations
def make_hashes_urlsafe_sri(apps, schema_editor):
Action = apps.get_model('recipes', 'Action')
for action in Action.objects.all():
data = action.implementation.encode()
digest = hashlib.sha384(data).digest()
data_hash = urlsafe_b64encode(digest)
action.implementation_hash = 'sha384-' + data_hash.decode()
action.save()
def make_hashes_sha1(apps, schema_editor):
Action = apps.get_model('recipes', 'Action')
for action in Action.objects.all():
data = action.implementation.encode()
data_hash = hashlib.sha1(data).hexdigest()
action.implementation_hash = data_hash
action.save()
class Migration(migrations.Migration):
dependencies = [
('recipes', '0044_auto_20170801_0010'),
]
operations = [
migrations.RunPython(make_hashes_urlsafe_sri, make_hashes_sha1),
]
## Instruction:
Fix lint checks in migration recipes/0045.
## Code After:
from __future__ import unicode_literals
import hashlib
from base64 import urlsafe_b64encode
from django.db import migrations
def make_hashes_urlsafe_sri(apps, schema_editor):
Action = apps.get_model('recipes', 'Action')
for action in Action.objects.all():
data = action.implementation.encode()
digest = hashlib.sha384(data).digest()
data_hash = urlsafe_b64encode(digest)
action.implementation_hash = 'sha384-' + data_hash.decode()
action.save()
def make_hashes_sha1(apps, schema_editor):
Action = apps.get_model('recipes', 'Action')
for action in Action.objects.all():
data = action.implementation.encode()
data_hash = hashlib.sha1(data).hexdigest()
action.implementation_hash = data_hash
action.save()
class Migration(migrations.Migration):
dependencies = [
('recipes', '0044_auto_20170801_0010'),
]
operations = [
migrations.RunPython(make_hashes_urlsafe_sri, make_hashes_sha1),
]
|
b60f4373125dfbf2a38940e1a34c8c94162e5a06 | ci/README.md | ci/README.md |
Follow the steps in [BOSH: Configuring Concourse Pipelines][link], using "bosh-agent" as the value for `PROJECT_NAME`.
[link]: (https://github.com/cloudfoundry/bosh/blob/develop/docs/configuring_concourse_pipelines.md)
|
Follow the steps in [BOSH: Configuring Concourse Pipelines](https://github.com/cloudfoundry/bosh/blob/develop/docs/configuring_concourse_pipelines.md), using "bosh-agent" as the value for `PROJECT_NAME`.
| Fix link in CI readme | Fix link in CI readme
Signed-off-by: Ben Moss <80bec717c12290923c80384e0a5fc28cdf75a97e@pivotal.io>
| Markdown | apache-2.0 | gu-bin/bosh-agent,gu-bin/bosh-agent,gu-bin/bosh-agent,mattcui/bosh-agent,cloudfoundry/bosh-agent,cloudfoundry/bosh-agent,mattcui/bosh-agent,mattcui/bosh-agent | markdown | ## Code Before:
Follow the steps in [BOSH: Configuring Concourse Pipelines][link], using "bosh-agent" as the value for `PROJECT_NAME`.
[link]: (https://github.com/cloudfoundry/bosh/blob/develop/docs/configuring_concourse_pipelines.md)
## Instruction:
Fix link in CI readme
Signed-off-by: Ben Moss <80bec717c12290923c80384e0a5fc28cdf75a97e@pivotal.io>
## Code After:
Follow the steps in [BOSH: Configuring Concourse Pipelines](https://github.com/cloudfoundry/bosh/blob/develop/docs/configuring_concourse_pipelines.md), using "bosh-agent" as the value for `PROJECT_NAME`.
|
e30ef58232a578e81ef2478991f346aa2a2b86cc | Sources/Quick/QuickMain.swift | Sources/Quick/QuickMain.swift | import XCTest
// NOTE: This file is not intended to be included in the Xcode project or CocoaPods.
// It is picked up by the Swift Package Manager during its build process.
@noreturn public func QCKMain(specs: [XCTestCase], configurations: [QuickConfiguration.Type] = []) {
// Perform all configuration (ensures that shared examples have been discovered)
World.sharedWorld.configure { configuration in
for configurationClass in configurations {
configurationClass.configure(configuration)
}
}
World.sharedWorld.finalizeConfiguration()
// Gather all examples (ensures suite hooks have been discovered)
for case let spec as QuickSpec in specs {
spec.gatherExamplesIfNeeded()
}
XCTMain(specs)
}
| import XCTest
// NOTE: This file is not intended to be included in the Xcode project or CocoaPods.
// It is picked up by the Swift Package Manager during its build process.
/// When using Quick with swift-corelibs-xctest, automatic discovery of specs and
/// configurations is not available. Instead, you should create a standalone
/// executable and call this function from its main.swift file. This will execute
/// the specs and then terminate the process with an exit code of 0 if the tests
/// passed, or 1 if there were any failures.
///
/// Quick is known to work with the DEVELOPMENT-SNAPSHOT-2016-02-03-a Swift toolchain
@noreturn public func QCKMain(specs: [XCTestCase], configurations: [QuickConfiguration.Type] = []) {
// Perform all configuration (ensures that shared examples have been discovered)
World.sharedWorld.configure { configuration in
for configurationClass in configurations {
configurationClass.configure(configuration)
}
}
World.sharedWorld.finalizeConfiguration()
// Gather all examples (ensures suite hooks have been discovered)
for case let spec as QuickSpec in specs {
spec.gatherExamplesIfNeeded()
}
XCTMain(specs)
}
| Add basic documentation for QCKMain | Add basic documentation for QCKMain
| Swift | apache-2.0 | marciok/Quick,jeffh/Quick,mokagio/Quick,dgdosen/Quick,jeffh/Quick,Quick/Quick,phatblat/Quick,ashfurrow/Quick,marciok/Quick,jeffh/Quick,ikesyo/Quick,ashfurrow/Quick,Quick/Quick,dgdosen/Quick,dgdosen/Quick,phatblat/Quick,phatblat/Quick,Quick/Quick,mokagio/Quick,marciok/Quick,ikesyo/Quick,DanielAsher/Quick,ikesyo/Quick,Quick/Quick,DanielAsher/Quick,phatblat/Quick,mokagio/Quick | swift | ## Code Before:
import XCTest
// NOTE: This file is not intended to be included in the Xcode project or CocoaPods.
// It is picked up by the Swift Package Manager during its build process.
@noreturn public func QCKMain(specs: [XCTestCase], configurations: [QuickConfiguration.Type] = []) {
// Perform all configuration (ensures that shared examples have been discovered)
World.sharedWorld.configure { configuration in
for configurationClass in configurations {
configurationClass.configure(configuration)
}
}
World.sharedWorld.finalizeConfiguration()
// Gather all examples (ensures suite hooks have been discovered)
for case let spec as QuickSpec in specs {
spec.gatherExamplesIfNeeded()
}
XCTMain(specs)
}
## Instruction:
Add basic documentation for QCKMain
## Code After:
import XCTest
// NOTE: This file is not intended to be included in the Xcode project or CocoaPods.
// It is picked up by the Swift Package Manager during its build process.
/// When using Quick with swift-corelibs-xctest, automatic discovery of specs and
/// configurations is not available. Instead, you should create a standalone
/// executable and call this function from its main.swift file. This will execute
/// the specs and then terminate the process with an exit code of 0 if the tests
/// passed, or 1 if there were any failures.
///
/// Quick is known to work with the DEVELOPMENT-SNAPSHOT-2016-02-03-a Swift toolchain
@noreturn public func QCKMain(specs: [XCTestCase], configurations: [QuickConfiguration.Type] = []) {
// Perform all configuration (ensures that shared examples have been discovered)
World.sharedWorld.configure { configuration in
for configurationClass in configurations {
configurationClass.configure(configuration)
}
}
World.sharedWorld.finalizeConfiguration()
// Gather all examples (ensures suite hooks have been discovered)
for case let spec as QuickSpec in specs {
spec.gatherExamplesIfNeeded()
}
XCTMain(specs)
}
|
6048ade60f9dc56387258fab4dc54a7dcef51e1c | config/locales/de.yml | config/locales/de.yml | de:
activerecord:
models:
salary: Lohn
attributes:
salary:
amount: Bruttolohn
net_amount: Nettolohn
bvg_amount: BVG
social_amount: AHV/IV/EO/ALV/NBU
customer: Arbeitgeber
company: Arbeitnehmer
crud:
title:
day:
new: Abrechnung von %{day}
bookyt:
main_navigation:
days: Tagesabschlüsse
new_day: Tagesabschluss
tooltip:
new_day: Hier können Sie einen neuen Tagesabschluss gemäss Registrierkasse buchen
days: Hier finden Sie eine Liste aller Tagesabschlüsse
| de:
activerecord:
models:
salary: Lohn
attributes:
salary:
amount: Bruttolohn
net_amount: Nettolohn
bvg_amount: BVG
social_amount: AHV/IV/EO/ALV/NBU
employer: Arbeitgeber
employee: Arbeitnehmer
crud:
title:
day:
new: Abrechnung von %{day}
bookyt:
main_navigation:
days: Tagesabschlüsse
new_day: Tagesabschluss
tooltip:
new_day: Hier können Sie einen neuen Tagesabschluss gemäss Registrierkasse buchen
days: Hier finden Sie eine Liste aller Tagesabschlüsse
| Fix locale for salary.employe{r,e} keys. | Fix locale for salary.employe{r,e} keys.
| YAML | mit | silvermind/bookyt_salary,silvermind/bookyt_salary,huerlisi/bookyt_salary,silvermind/bookyt_salary,huerlisi/bookyt_salary,huerlisi/bookyt_salary | yaml | ## Code Before:
de:
activerecord:
models:
salary: Lohn
attributes:
salary:
amount: Bruttolohn
net_amount: Nettolohn
bvg_amount: BVG
social_amount: AHV/IV/EO/ALV/NBU
customer: Arbeitgeber
company: Arbeitnehmer
crud:
title:
day:
new: Abrechnung von %{day}
bookyt:
main_navigation:
days: Tagesabschlüsse
new_day: Tagesabschluss
tooltip:
new_day: Hier können Sie einen neuen Tagesabschluss gemäss Registrierkasse buchen
days: Hier finden Sie eine Liste aller Tagesabschlüsse
## Instruction:
Fix locale for salary.employe{r,e} keys.
## Code After:
de:
activerecord:
models:
salary: Lohn
attributes:
salary:
amount: Bruttolohn
net_amount: Nettolohn
bvg_amount: BVG
social_amount: AHV/IV/EO/ALV/NBU
employer: Arbeitgeber
employee: Arbeitnehmer
crud:
title:
day:
new: Abrechnung von %{day}
bookyt:
main_navigation:
days: Tagesabschlüsse
new_day: Tagesabschluss
tooltip:
new_day: Hier können Sie einen neuen Tagesabschluss gemäss Registrierkasse buchen
days: Hier finden Sie eine Liste aller Tagesabschlüsse
|
95d6eeaf9ff4d2b28799dc4a86eda98c5d73c358 | server/cat/catModel.js | server/cat/catModel.js | var mongoose = require('mongoose');
var CatSchema = new mongoose.Schema({
id: {
type: String,
required: true,
unique: true
},
name: String,
age: String,
description: String,
photo: String,
phone: String,
email: String,
zip: String
});
module.exports = mongoose.model('Cat', CatSchema);
| var mongoose = require('mongoose');
var CatSchema = new mongoose.Schema({
id: {
type: String,
required: true,
unique: true
},
name: String,
age: String,
description: String,
photo: String,
'contact-name': String,
phone: String,
email: String,
zip: String
});
module.exports = mongoose.model('Cat', CatSchema);
| Update CatSchema to include 'contact-name' | Update CatSchema to include 'contact-name'
| JavaScript | mit | reinaisnothere/mvp,reinaisnothere/mvp | javascript | ## Code Before:
var mongoose = require('mongoose');
var CatSchema = new mongoose.Schema({
id: {
type: String,
required: true,
unique: true
},
name: String,
age: String,
description: String,
photo: String,
phone: String,
email: String,
zip: String
});
module.exports = mongoose.model('Cat', CatSchema);
## Instruction:
Update CatSchema to include 'contact-name'
## Code After:
var mongoose = require('mongoose');
var CatSchema = new mongoose.Schema({
id: {
type: String,
required: true,
unique: true
},
name: String,
age: String,
description: String,
photo: String,
'contact-name': String,
phone: String,
email: String,
zip: String
});
module.exports = mongoose.model('Cat', CatSchema);
|
9ae69cbc9038a11be9423f436e279ce8817610d3 | src/addDigits.js | src/addDigits.js | /**
* Copyright 2003-present Greg Hurrell. All rights reserved.
* Licensed under the terms of the MIT license.
*
* @flow
*/
export default function addDigits(
aDigits: Array<number>,
bDigits: Array<number>,
base: number
): Array<number> {
let result = [];
let carry = 0;
const aLength = aDigits.length;
const bLength = bDigits.length;
for (let i = 0; i < aLength || i < bLength || carry; i++) {
const aDigit = i < aLength ? aDigits[aLength - i - 1] : 0;
const bDigit = i < bLength ? bDigits[bLength - i - 1] : 0;
const sum = aDigit + bDigit + carry;
result.push(sum % base);
// ~~ here is the equivalent of Math.floor; used to avoid V8 de-opt,
// "Reference to a variable which requires dynamic lookup".
carry = ~~(sum / base);
}
return result.length ? result.reverse() : [0];
}
| /**
* Copyright 2003-present Greg Hurrell. All rights reserved.
* Licensed under the terms of the MIT license.
*
* @flow
*/
export default function addDigits(
aDigits: Array<number>,
bDigits: Array<number>,
base: number
): Array<number> {
let result = [];
let carry = 0;
const aLength = aDigits.length;
const bLength = bDigits.length;
for (let i = 0; i < aLength || i < bLength || carry; i++) {
const aDigit = i < aLength ? aDigits[aLength - i - 1] : 0;
const bDigit = i < bLength ? bDigits[bLength - i - 1] : 0;
const sum = aDigit + bDigit + carry;
result.unshift(sum % base);
// ~~ here is the equivalent of Math.floor; used to avoid V8 de-opt,
// "Reference to a variable which requires dynamic lookup".
carry = ~~(sum / base);
}
return result.length ? result : [0];
}
| Revert "Replace `unshift()` with `push()` plus `reverse()`" | Revert "Replace `unshift()` with `push()` plus `reverse()`"
This reverts commit f21c36ed85f277522593f1aed902d18f787a34b1.
| JavaScript | mit | wincent/hextrapolate,wincent/hextrapolate,wincent/hextrapolate | javascript | ## Code Before:
/**
* Copyright 2003-present Greg Hurrell. All rights reserved.
* Licensed under the terms of the MIT license.
*
* @flow
*/
export default function addDigits(
aDigits: Array<number>,
bDigits: Array<number>,
base: number
): Array<number> {
let result = [];
let carry = 0;
const aLength = aDigits.length;
const bLength = bDigits.length;
for (let i = 0; i < aLength || i < bLength || carry; i++) {
const aDigit = i < aLength ? aDigits[aLength - i - 1] : 0;
const bDigit = i < bLength ? bDigits[bLength - i - 1] : 0;
const sum = aDigit + bDigit + carry;
result.push(sum % base);
// ~~ here is the equivalent of Math.floor; used to avoid V8 de-opt,
// "Reference to a variable which requires dynamic lookup".
carry = ~~(sum / base);
}
return result.length ? result.reverse() : [0];
}
## Instruction:
Revert "Replace `unshift()` with `push()` plus `reverse()`"
This reverts commit f21c36ed85f277522593f1aed902d18f787a34b1.
## Code After:
/**
* Copyright 2003-present Greg Hurrell. All rights reserved.
* Licensed under the terms of the MIT license.
*
* @flow
*/
export default function addDigits(
aDigits: Array<number>,
bDigits: Array<number>,
base: number
): Array<number> {
let result = [];
let carry = 0;
const aLength = aDigits.length;
const bLength = bDigits.length;
for (let i = 0; i < aLength || i < bLength || carry; i++) {
const aDigit = i < aLength ? aDigits[aLength - i - 1] : 0;
const bDigit = i < bLength ? bDigits[bLength - i - 1] : 0;
const sum = aDigit + bDigit + carry;
result.unshift(sum % base);
// ~~ here is the equivalent of Math.floor; used to avoid V8 de-opt,
// "Reference to a variable which requires dynamic lookup".
carry = ~~(sum / base);
}
return result.length ? result : [0];
}
|
8a0510aac90cd48c125793ffcc9f31b203dfa062 | .goxc.json | .goxc.json | {
"ConfigVersion": "0.9",
"ArtifactsDest": "target",
"Arch": "386,amd64",
"Os": "darwin,linux,windows",
"PackageVersion": "0.25.0",
"PrereleaseInfo": "snapshot",
"Verbosity": "v",
"Resources": {
"Include": "INSTALL*,README*,LICENSE*",
"Exclude": "*.go"
},
"TaskSettings": {
"archive": {
"os": {
"linux": "TarGz",
"darwin": "TarGz"
}
},
"downloads-page": {
"fileheader": "",
"filename": ""
},
"pkg-build": {
"metadata": {
"description": "Fast GitHub command line client",
"maintainer": "Jingwen Owen Ou (http://owenou.com)"
},
"metadata-deb": {
"Depends": "",
"Homepage": "http://owenou.com/gh"
}
}
}
}
| {
"ConfigVersion": "0.9",
"ArtifactsDest": "target",
"Arch": "386,amd64",
"Os": "darwin,linux,windows",
"PackageVersion": "0.25.0",
"PrereleaseInfo": "snapshot",
"Verbosity": "v",
"Resources": {
"Include": "INSTALL*,README*,LICENSE*",
"Exclude": "*.go"
},
"TaskSettings": {
"downloads-page": {
"fileheader": "",
"filename": ""
},
"pkg-build": {
"metadata": {
"description": "Fast GitHub command line client",
"maintainer": "Jingwen Owen Ou (http://owenou.com)"
},
"metadata-deb": {
"Depends": "",
"Homepage": "http://owenou.com/gh"
}
}
}
}
| Use zip for darwin build | Use zip for darwin build | JSON | mit | jingweno/gh,jingweno/gh,beni55/gh,jingweno/gh,beni55/gh,beni55/gh | json | ## Code Before:
{
"ConfigVersion": "0.9",
"ArtifactsDest": "target",
"Arch": "386,amd64",
"Os": "darwin,linux,windows",
"PackageVersion": "0.25.0",
"PrereleaseInfo": "snapshot",
"Verbosity": "v",
"Resources": {
"Include": "INSTALL*,README*,LICENSE*",
"Exclude": "*.go"
},
"TaskSettings": {
"archive": {
"os": {
"linux": "TarGz",
"darwin": "TarGz"
}
},
"downloads-page": {
"fileheader": "",
"filename": ""
},
"pkg-build": {
"metadata": {
"description": "Fast GitHub command line client",
"maintainer": "Jingwen Owen Ou (http://owenou.com)"
},
"metadata-deb": {
"Depends": "",
"Homepage": "http://owenou.com/gh"
}
}
}
}
## Instruction:
Use zip for darwin build
## Code After:
{
"ConfigVersion": "0.9",
"ArtifactsDest": "target",
"Arch": "386,amd64",
"Os": "darwin,linux,windows",
"PackageVersion": "0.25.0",
"PrereleaseInfo": "snapshot",
"Verbosity": "v",
"Resources": {
"Include": "INSTALL*,README*,LICENSE*",
"Exclude": "*.go"
},
"TaskSettings": {
"downloads-page": {
"fileheader": "",
"filename": ""
},
"pkg-build": {
"metadata": {
"description": "Fast GitHub command line client",
"maintainer": "Jingwen Owen Ou (http://owenou.com)"
},
"metadata-deb": {
"Depends": "",
"Homepage": "http://owenou.com/gh"
}
}
}
}
|
85fa57a6def7a1ab0ee9a460ac6fc3eb5443d039 | package.js | package.js | Package.describe({
"summary": "Add feature flagging to Meteor"
});
Package.on_use(function (api) {
api.use('coffeescript', ['server', 'client']);
api.use(['deps','handlebars','jquery'], 'client');
api.use('underscore', 'server');
api.use('accounts-base', ['client'])
api.add_files('server/server_flag.coffee', 'server')
api.add_files('client/client_flag.coffee', 'client')
if (typeof api.export !== 'undefined'){
api.export('FeatureFlag', 'server');
}
});
Package.on_test(function(api) {
api.use('coffeescript', ['server', 'client']);
api.use(['meteor-feature-flag',"tinytest", "test-helpers"])
api.use('underscore', 'server');
api.add_files('test/feature_flag.coffee', 'server')
});
| Package.describe({
"summary": "Add feature flagging to Meteor"
});
Package.on_use(function (api) {
api.use('coffeescript', ['server', 'client']);
api.use(['deps','ui','templating', 'jquery'], 'client');
api.use('underscore', 'server');
api.use('accounts-base', ['client'])
api.add_files('server/server_flag.coffee', 'server')
api.add_files('client/client_flag.coffee', 'client')
if (typeof api.export !== 'undefined'){
api.export('FeatureFlag', 'server');
}
});
Package.on_test(function(api) {
api.use('coffeescript', ['server', 'client']);
api.use(['meteor-feature-flag',"tinytest", "test-helpers"])
api.use('underscore', 'server');
api.add_files('test/feature_flag.coffee', 'server')
});
| Update deps to use new UI helpers | Update deps to use new UI helpers
| JavaScript | mit | recursivefaults/meteor-feature-flag | javascript | ## Code Before:
Package.describe({
"summary": "Add feature flagging to Meteor"
});
Package.on_use(function (api) {
api.use('coffeescript', ['server', 'client']);
api.use(['deps','handlebars','jquery'], 'client');
api.use('underscore', 'server');
api.use('accounts-base', ['client'])
api.add_files('server/server_flag.coffee', 'server')
api.add_files('client/client_flag.coffee', 'client')
if (typeof api.export !== 'undefined'){
api.export('FeatureFlag', 'server');
}
});
Package.on_test(function(api) {
api.use('coffeescript', ['server', 'client']);
api.use(['meteor-feature-flag',"tinytest", "test-helpers"])
api.use('underscore', 'server');
api.add_files('test/feature_flag.coffee', 'server')
});
## Instruction:
Update deps to use new UI helpers
## Code After:
Package.describe({
"summary": "Add feature flagging to Meteor"
});
Package.on_use(function (api) {
api.use('coffeescript', ['server', 'client']);
api.use(['deps','ui','templating', 'jquery'], 'client');
api.use('underscore', 'server');
api.use('accounts-base', ['client'])
api.add_files('server/server_flag.coffee', 'server')
api.add_files('client/client_flag.coffee', 'client')
if (typeof api.export !== 'undefined'){
api.export('FeatureFlag', 'server');
}
});
Package.on_test(function(api) {
api.use('coffeescript', ['server', 'client']);
api.use(['meteor-feature-flag',"tinytest", "test-helpers"])
api.use('underscore', 'server');
api.add_files('test/feature_flag.coffee', 'server')
});
|
fd8cfaaf7897076e1acd73becd0fbc940ad0add7 | .travis.yml | .travis.yml | language: ruby
rvm:
- 1.9.3
- 2.0.0
env:
global:
secure: LW/HXahV3KYeaFjHXboWs37GHOeABPHkjOFgZN79vSz2ClJSckMAAU/sbQw6HWtIzIHlRawX4sDTd0KIXEpFH6hwWevwXMgLpNzRh2utdPwWuN96+/KrwPI/vnYjtFqpxouQqWEma5SlN1HGxrs5qCKmIBNplQGbpDVxiSfyWEo=
| language: ruby
rvm:
- 1.9.3
- 2.0.0
- rbx-19mode
env:
global:
secure: LW/HXahV3KYeaFjHXboWs37GHOeABPHkjOFgZN79vSz2ClJSckMAAU/sbQw6HWtIzIHlRawX4sDTd0KIXEpFH6hwWevwXMgLpNzRh2utdPwWuN96+/KrwPI/vnYjtFqpxouQqWEma5SlN1HGxrs5qCKmIBNplQGbpDVxiSfyWEo=
| Add rubinius in 1.9 mode to Travis CI builds | Add rubinius in 1.9 mode to Travis CI builds
| YAML | mit | gussan/pronto,treble37/pronto,HaiTo/pronto,mmozuras/pronto,Zauberstuhl/pronto,jhass/pronto,prontolabs/pronto,mvz/pronto,aergonaut/pronto | yaml | ## Code Before:
language: ruby
rvm:
- 1.9.3
- 2.0.0
env:
global:
secure: LW/HXahV3KYeaFjHXboWs37GHOeABPHkjOFgZN79vSz2ClJSckMAAU/sbQw6HWtIzIHlRawX4sDTd0KIXEpFH6hwWevwXMgLpNzRh2utdPwWuN96+/KrwPI/vnYjtFqpxouQqWEma5SlN1HGxrs5qCKmIBNplQGbpDVxiSfyWEo=
## Instruction:
Add rubinius in 1.9 mode to Travis CI builds
## Code After:
language: ruby
rvm:
- 1.9.3
- 2.0.0
- rbx-19mode
env:
global:
secure: LW/HXahV3KYeaFjHXboWs37GHOeABPHkjOFgZN79vSz2ClJSckMAAU/sbQw6HWtIzIHlRawX4sDTd0KIXEpFH6hwWevwXMgLpNzRh2utdPwWuN96+/KrwPI/vnYjtFqpxouQqWEma5SlN1HGxrs5qCKmIBNplQGbpDVxiSfyWEo=
|
af6eb337c1bf72d78d3aac93d4d0c348d8053f6a | phpunit.xml.dist | phpunit.xml.dist | <?xml version="1.0" encoding="UTF-8"?>
<phpunit xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:noNamespaceSchemaLocation="https://schema.phpunit.de/8.5/phpunit.xsd"
bootstrap="vendor/autoload.php"
executionOrder="depends,defects"
forceCoversAnnotation="true"
beStrictAboutCoversAnnotation="false"
beStrictAboutOutputDuringTests="true"
beStrictAboutTodoAnnotatedTests="false"
verbose="true">
<testsuites>
<testsuite name="imbo-unit">
<directory suffix="Test.php">tests</directory>
</testsuite>
</testsuites>
<php>
<!-- General config -->
<const name="FIXTURES_DIR" value="./tests/Fixtures" />
<const name="PROJECT_ROOT" value="./" />
<const name="DATA_DIR" value="./data" />
</php>
<filter>
<whitelist processUncoveredFilesFromWhitelist="true">
<directory suffix=".php">src</directory>
<directory>tests/behat/features/bootstrap</directory>
</whitelist>
</filter>
<groups>
<exclude>
<group>local</group>
</exclude>
</groups>
</phpunit>
| <?xml version="1.0" encoding="UTF-8"?>
<phpunit
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:noNamespaceSchemaLocation="https://schema.phpunit.de/9.3/phpunit.xsd"
bootstrap="vendor/autoload.php"
executionOrder="depends,defects"
forceCoversAnnotation="true"
beStrictAboutCoversAnnotation="false"
beStrictAboutOutputDuringTests="true"
beStrictAboutTodoAnnotatedTests="false"
verbose="true"
>
<coverage processUncoveredFiles="true">
<include>
<directory suffix=".php">src</directory>
<directory>tests/behat/features/bootstrap</directory>
</include>
</coverage>
<testsuites>
<testsuite name="imbo-unit">
<directory suffix="Test.php">tests</directory>
</testsuite>
</testsuites>
<php>
<!-- General config -->
<const name="FIXTURES_DIR" value="./tests/Fixtures"/>
<const name="PROJECT_ROOT" value="./"/>
<const name="DATA_DIR" value="./data"/>
</php>
<groups>
<exclude>
<group>local</group>
</exclude>
</groups>
</phpunit>
| Update PHPUnit configuration to match latest schema | Update PHPUnit configuration to match latest schema
| unknown | mit | imbo/imbo,imbo/imbo | unknown | ## Code Before:
<?xml version="1.0" encoding="UTF-8"?>
<phpunit xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:noNamespaceSchemaLocation="https://schema.phpunit.de/8.5/phpunit.xsd"
bootstrap="vendor/autoload.php"
executionOrder="depends,defects"
forceCoversAnnotation="true"
beStrictAboutCoversAnnotation="false"
beStrictAboutOutputDuringTests="true"
beStrictAboutTodoAnnotatedTests="false"
verbose="true">
<testsuites>
<testsuite name="imbo-unit">
<directory suffix="Test.php">tests</directory>
</testsuite>
</testsuites>
<php>
<!-- General config -->
<const name="FIXTURES_DIR" value="./tests/Fixtures" />
<const name="PROJECT_ROOT" value="./" />
<const name="DATA_DIR" value="./data" />
</php>
<filter>
<whitelist processUncoveredFilesFromWhitelist="true">
<directory suffix=".php">src</directory>
<directory>tests/behat/features/bootstrap</directory>
</whitelist>
</filter>
<groups>
<exclude>
<group>local</group>
</exclude>
</groups>
</phpunit>
## Instruction:
Update PHPUnit configuration to match latest schema
## Code After:
<?xml version="1.0" encoding="UTF-8"?>
<phpunit
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:noNamespaceSchemaLocation="https://schema.phpunit.de/9.3/phpunit.xsd"
bootstrap="vendor/autoload.php"
executionOrder="depends,defects"
forceCoversAnnotation="true"
beStrictAboutCoversAnnotation="false"
beStrictAboutOutputDuringTests="true"
beStrictAboutTodoAnnotatedTests="false"
verbose="true"
>
<coverage processUncoveredFiles="true">
<include>
<directory suffix=".php">src</directory>
<directory>tests/behat/features/bootstrap</directory>
</include>
</coverage>
<testsuites>
<testsuite name="imbo-unit">
<directory suffix="Test.php">tests</directory>
</testsuite>
</testsuites>
<php>
<!-- General config -->
<const name="FIXTURES_DIR" value="./tests/Fixtures"/>
<const name="PROJECT_ROOT" value="./"/>
<const name="DATA_DIR" value="./data"/>
</php>
<groups>
<exclude>
<group>local</group>
</exclude>
</groups>
</phpunit>
|
88326523e416595a600d1abce5f4da0b95ed9bb4 | lib/paperclip_processors/metadata_extractor.rb | lib/paperclip_processors/metadata_extractor.rb | module Paperclip
class MetadataExtractor < Paperclip::Processor
def make
# get the metadata from Yamdi
metadata = Yamdi.new(file.path)
# add values to the attachment instance (for db persistence, etc)
# assumes duration is a column in the table this attachment is being
# added to.
attachment.instance.duration = metadata.duration
# always return a reference to the file when done
file
end
end
end | module Paperclip
class MetadataExtractor < Paperclip::Processor
def make
# get the metadata from Yamdi
metadata = Yamdi.new(file.path)
# add values to the attachment instance (for db persistence, etc)
# assumes duration is a column in the table this attachment is being
# added to.
# a simple assignment works if you are not background processing your
# paperclip processing
attachment.instance.duration = metadata.duration
# if you are background processing your paperclip processing which is
# common when working with videos you'll want to ensure the instance gets
# saved at some point in the processor chain, use:
# attachment.instance.update_attribute(:duration, metadata.duration)
# always return a reference to the file when done
file
end
end
end | Add documentation for paperclip post processor and ensuring the attachment instance is saved when using background paperclip processing. | Add documentation for paperclip post processor and ensuring the attachment instance is saved when using background paperclip processing. | Ruby | mit | peregrinator/yamdi | ruby | ## Code Before:
module Paperclip
class MetadataExtractor < Paperclip::Processor
def make
# get the metadata from Yamdi
metadata = Yamdi.new(file.path)
# add values to the attachment instance (for db persistence, etc)
# assumes duration is a column in the table this attachment is being
# added to.
attachment.instance.duration = metadata.duration
# always return a reference to the file when done
file
end
end
end
## Instruction:
Add documentation for paperclip post processor and ensuring the attachment instance is saved when using background paperclip processing.
## Code After:
module Paperclip
class MetadataExtractor < Paperclip::Processor
def make
# get the metadata from Yamdi
metadata = Yamdi.new(file.path)
# add values to the attachment instance (for db persistence, etc)
# assumes duration is a column in the table this attachment is being
# added to.
# a simple assignment works if you are not background processing your
# paperclip processing
attachment.instance.duration = metadata.duration
# if you are background processing your paperclip processing which is
# common when working with videos you'll want to ensure the instance gets
# saved at some point in the processor chain, use:
# attachment.instance.update_attribute(:duration, metadata.duration)
# always return a reference to the file when done
file
end
end
end |
7517d7674098ff9544c46c0e717f7dd87a6abb3c | test/rain/deployer_test.rb | test/rain/deployer_test.rb | require 'test_helper'
class Rain::DeployerTest < ActiveSupport::TestCase
describe "DeployerTest: bare invocation" do
setup { @command = %x(./bin/rain) }
should "deploy a new tag to stage" do
skip "suck it"
assert_match 'Got a handful of stacks better grab an umbrella', @command
end
should "deploy the same tag that's on stage to production" do
assert_match 'jflksjflksjfl;', @command
end
end
describe "DeployerTest: help invocation for 'on'" do
before { @command = %x(./bin/rain help on) }
should "prompt for an environment" do
assert_match 'rain on ENVIRONMENT', @command
end
should "be incrementable by patch version" do
assert_match '--patch', @command
end
should "be incrementable by minor version" do
assert_match '--minor', @command
end
should "be incrementable by major version" do
assert_match '--major', @command
end
end
end
| require 'test_helper'
class Rain::DeployerTest < ActiveSupport::TestCase
describe "DeployerTest: bare invocation" do
before { @command ||= %x(./bin/rain) }
should "deploy to production" do
assert_match 'Got a handful of stacks better grab an umbrella', @command
end
end
describe "DeployerTest: specific environment invocation" do
context "on stage" do
before { @command ||= %x(./bin/rain on stage) }
should "deploy a new tag to stage" do
assert_match 'Deploying existing tag', @command
end
end
context "on production" do
before { @command ||= %x(./bin/rain on production) }
should "deploy the same tag that's on stage to production" do
assert_match 'Deploying existing tag', @command
end
end
end
describe "DeployerTest: help invocation for 'on'" do
before { @command ||= %x(./bin/rain help on) }
should "prompt for an environment" do
assert_match 'rain on ENVIRONMENT', @command
end
should "be incrementable by patch version" do
assert_match '--patch', @command
end
should "be incrementable by minor version" do
assert_match '--minor', @command
end
should "be incrementable by major version" do
assert_match '--major', @command
end
end
end
| Add tests for on stage and on prod | Add tests for on stage and on prod
| Ruby | mit | eLocal/rain | ruby | ## Code Before:
require 'test_helper'
class Rain::DeployerTest < ActiveSupport::TestCase
describe "DeployerTest: bare invocation" do
setup { @command = %x(./bin/rain) }
should "deploy a new tag to stage" do
skip "suck it"
assert_match 'Got a handful of stacks better grab an umbrella', @command
end
should "deploy the same tag that's on stage to production" do
assert_match 'jflksjflksjfl;', @command
end
end
describe "DeployerTest: help invocation for 'on'" do
before { @command = %x(./bin/rain help on) }
should "prompt for an environment" do
assert_match 'rain on ENVIRONMENT', @command
end
should "be incrementable by patch version" do
assert_match '--patch', @command
end
should "be incrementable by minor version" do
assert_match '--minor', @command
end
should "be incrementable by major version" do
assert_match '--major', @command
end
end
end
## Instruction:
Add tests for on stage and on prod
## Code After:
require 'test_helper'
class Rain::DeployerTest < ActiveSupport::TestCase
describe "DeployerTest: bare invocation" do
before { @command ||= %x(./bin/rain) }
should "deploy to production" do
assert_match 'Got a handful of stacks better grab an umbrella', @command
end
end
describe "DeployerTest: specific environment invocation" do
context "on stage" do
before { @command ||= %x(./bin/rain on stage) }
should "deploy a new tag to stage" do
assert_match 'Deploying existing tag', @command
end
end
context "on production" do
before { @command ||= %x(./bin/rain on production) }
should "deploy the same tag that's on stage to production" do
assert_match 'Deploying existing tag', @command
end
end
end
describe "DeployerTest: help invocation for 'on'" do
before { @command ||= %x(./bin/rain help on) }
should "prompt for an environment" do
assert_match 'rain on ENVIRONMENT', @command
end
should "be incrementable by patch version" do
assert_match '--patch', @command
end
should "be incrementable by minor version" do
assert_match '--minor', @command
end
should "be incrementable by major version" do
assert_match '--major', @command
end
end
end
|
fcdd0b808419bb32cd5b35d3467b0907bc81efd4 | SpriteSheetPacker/main.cpp | SpriteSheetPacker/main.cpp |
int commandLine(QCoreApplication& app);
int main(int argc, char *argv[])
{
QApplication app(argc, argv);
#ifdef Q_OS_WIN32
QApplication::setStyle(QStyleFactory::create("Fusion"));
#endif
QCoreApplication::setOrganizationName("amakaseev");
QCoreApplication::setOrganizationDomain("spicyminds-lab.com");
QCoreApplication::setApplicationName("SpriteSheetPacker");
QCoreApplication::setApplicationVersion("1.0.3");
//QDir::setCurrent(QApplication::applicationDirPath());
SpritePackerProjectFile::factory().set<SpritePackerProjectFile>("json");
SpritePackerProjectFile::factory().set<SpritePackerProjectFileOLD>("sp");
SpritePackerProjectFile::factory().set<SpritePackerProjectFileTPS>("tps");
if (argc > 1) {
return commandLine(app);
} else {
MainWindow w;
w.show();
return app.exec();
}
}
|
int commandLine(QCoreApplication& app);
int main(int argc, char *argv[])
{
QApplication app(argc, argv);
QCoreApplication::setOrganizationName("amakaseev");
QCoreApplication::setOrganizationDomain("spicyminds-lab.com");
QCoreApplication::setApplicationName("SpriteSheetPacker");
QCoreApplication::setApplicationVersion("1.0.3");
//QDir::setCurrent(QApplication::applicationDirPath());
SpritePackerProjectFile::factory().set<SpritePackerProjectFile>("json");
SpritePackerProjectFile::factory().set<SpritePackerProjectFileOLD>("sp");
SpritePackerProjectFile::factory().set<SpritePackerProjectFileTPS>("tps");
if (argc > 1) {
return commandLine(app);
} else {
MainWindow w;
w.show();
return app.exec();
}
}
| Use windows native look and feel | Use windows native look and feel | C++ | mit | amakaseev/sprite-sheet-packer,TheCodez/sprite-sheet-packer,amakaseev/sprite-sheet-packer,TheCodez/sprite-sheet-packer,TheCodez/sprite-sheet-packer,amakaseev/sprite-sheet-packer,amakaseev/sprite-sheet-packer,TheCodez/sprite-sheet-packer | c++ | ## Code Before:
int commandLine(QCoreApplication& app);
int main(int argc, char *argv[])
{
QApplication app(argc, argv);
#ifdef Q_OS_WIN32
QApplication::setStyle(QStyleFactory::create("Fusion"));
#endif
QCoreApplication::setOrganizationName("amakaseev");
QCoreApplication::setOrganizationDomain("spicyminds-lab.com");
QCoreApplication::setApplicationName("SpriteSheetPacker");
QCoreApplication::setApplicationVersion("1.0.3");
//QDir::setCurrent(QApplication::applicationDirPath());
SpritePackerProjectFile::factory().set<SpritePackerProjectFile>("json");
SpritePackerProjectFile::factory().set<SpritePackerProjectFileOLD>("sp");
SpritePackerProjectFile::factory().set<SpritePackerProjectFileTPS>("tps");
if (argc > 1) {
return commandLine(app);
} else {
MainWindow w;
w.show();
return app.exec();
}
}
## Instruction:
Use windows native look and feel
## Code After:
int commandLine(QCoreApplication& app);
int main(int argc, char *argv[])
{
QApplication app(argc, argv);
QCoreApplication::setOrganizationName("amakaseev");
QCoreApplication::setOrganizationDomain("spicyminds-lab.com");
QCoreApplication::setApplicationName("SpriteSheetPacker");
QCoreApplication::setApplicationVersion("1.0.3");
//QDir::setCurrent(QApplication::applicationDirPath());
SpritePackerProjectFile::factory().set<SpritePackerProjectFile>("json");
SpritePackerProjectFile::factory().set<SpritePackerProjectFileOLD>("sp");
SpritePackerProjectFile::factory().set<SpritePackerProjectFileTPS>("tps");
if (argc > 1) {
return commandLine(app);
} else {
MainWindow w;
w.show();
return app.exec();
}
}
|
751f81eec5dc82721fd5185c908dc8e7b604accf | app/views/_header.erb | app/views/_header.erb | <nav>
<li><a href="/login">Login</a></li>
<li><a href="/logout">Logout</a></li>
<li><a href="/register">Register</a></li>
<% if auth_logged_in? %>
<li>Username: <%= auth_current_user.username %></li>
<% end %>
</nav>
| <nav>
<li><a href="/login">Login</a></li>
<li><a href="/logout">Logout</a></li>
<li><a href="/register">Register</a></li>
<li><a href="/profile/<%= auth_current_user.id %>/edit">Edit</a></li>
<% if auth_logged_in? %>
<li>Username: <%= auth_current_user.username %></li>
<% end %>
</nav>
| Add edit link on header layout | Add edit link on header layout
| HTML+ERB | mit | philipyoo/basic-rpg,philipyoo/basic-rpg,philipyoo/basic-rpg | html+erb | ## Code Before:
<nav>
<li><a href="/login">Login</a></li>
<li><a href="/logout">Logout</a></li>
<li><a href="/register">Register</a></li>
<% if auth_logged_in? %>
<li>Username: <%= auth_current_user.username %></li>
<% end %>
</nav>
## Instruction:
Add edit link on header layout
## Code After:
<nav>
<li><a href="/login">Login</a></li>
<li><a href="/logout">Logout</a></li>
<li><a href="/register">Register</a></li>
<li><a href="/profile/<%= auth_current_user.id %>/edit">Edit</a></li>
<% if auth_logged_in? %>
<li>Username: <%= auth_current_user.username %></li>
<% end %>
</nav>
|
3f037595535905890903e3eb57be2dba9d7091cf | src/MindOfMicah/LaravelDatatables/Datatable.php | src/MindOfMicah/LaravelDatatables/Datatable.php | <?php
namespace MindOfMicah\LaravelDatatables;
use Illuminate\Http\JsonResponse;
class Datatable
{
protected $model;
protected $columns;
public function __construct($a) {
$this->a = $a;
}
public function asJsonResponse()
{
$data = [];
$total = $amount_displayed = 0;
if ($this->model) {
$model_name = $this->model;
if ($this->columns) {
}
$sql = $model_name::query()->select($this->columns ?: '*');
$total = ($sql->count());
$models = $sql->take($this->a->input('length'))->get();
$data = $models->toArray();
$total = $total;
$amount_displayed = count($models);
}
$data = ([
'aaData'=>$data,
'iTotalRecords'=>$total,
'iTotalDisplayRecords'=>$amount_displayed
]);
return new JsonResponse($data, 200);
}
public function forEloquentModel($model)
{
$this->model = $model;
return $this;
}
public function pluckColumns($argument1)
{
$this->columns[] = $argument1;
return $this;
}
}
| <?php
namespace MindOfMicah\LaravelDatatables;
use Illuminate\Http\JsonResponse;
class Datatable
{
protected $model;
protected $columns;
public function __construct($a) {
$this->a = $a;
}
public function asJsonResponse()
{
$data = [];
$total = $amount_displayed = 0;
if ($this->model) {
$model_name = $this->model;
if ($this->columns) {
}
$sql = $model_name::query()->select($this->columns ?: '*');
$total = ($sql->count());
$sql->skip($this->a->input('start'));
$models = $sql->take($this->a->input('length'))->get();
$data = $models->toArray();
$total = $total;
$amount_displayed = count($models);
}
$data = ([
'aaData'=>$data,
'iTotalRecords'=>$total,
'iTotalDisplayRecords'=>$amount_displayed
]);
return new JsonResponse($data, 200);
}
public function forEloquentModel($model)
{
$this->model = $model;
return $this;
}
public function pluckColumns($argument1)
{
$this->columns[] = $argument1;
return $this;
}
}
| Set a starting point for the datatables | Set a starting point for the datatables
| PHP | mit | mindofmicah/laravel-datatables,mindofmicah/laravel-datatables | php | ## Code Before:
<?php
namespace MindOfMicah\LaravelDatatables;
use Illuminate\Http\JsonResponse;
class Datatable
{
protected $model;
protected $columns;
public function __construct($a) {
$this->a = $a;
}
public function asJsonResponse()
{
$data = [];
$total = $amount_displayed = 0;
if ($this->model) {
$model_name = $this->model;
if ($this->columns) {
}
$sql = $model_name::query()->select($this->columns ?: '*');
$total = ($sql->count());
$models = $sql->take($this->a->input('length'))->get();
$data = $models->toArray();
$total = $total;
$amount_displayed = count($models);
}
$data = ([
'aaData'=>$data,
'iTotalRecords'=>$total,
'iTotalDisplayRecords'=>$amount_displayed
]);
return new JsonResponse($data, 200);
}
public function forEloquentModel($model)
{
$this->model = $model;
return $this;
}
public function pluckColumns($argument1)
{
$this->columns[] = $argument1;
return $this;
}
}
## Instruction:
Set a starting point for the datatables
## Code After:
<?php
namespace MindOfMicah\LaravelDatatables;
use Illuminate\Http\JsonResponse;
class Datatable
{
protected $model;
protected $columns;
public function __construct($a) {
$this->a = $a;
}
public function asJsonResponse()
{
$data = [];
$total = $amount_displayed = 0;
if ($this->model) {
$model_name = $this->model;
if ($this->columns) {
}
$sql = $model_name::query()->select($this->columns ?: '*');
$total = ($sql->count());
$sql->skip($this->a->input('start'));
$models = $sql->take($this->a->input('length'))->get();
$data = $models->toArray();
$total = $total;
$amount_displayed = count($models);
}
$data = ([
'aaData'=>$data,
'iTotalRecords'=>$total,
'iTotalDisplayRecords'=>$amount_displayed
]);
return new JsonResponse($data, 200);
}
public function forEloquentModel($model)
{
$this->model = $model;
return $this;
}
public function pluckColumns($argument1)
{
$this->columns[] = $argument1;
return $this;
}
}
|
633af9808fc9502f652708d74f1ce32b2b8107e4 | OctoKit/Time.swift | OctoKit/Time.swift | import Foundation
struct Time {
/// A date formatter for RFC 3339 style timestamps. Uses POSIX locale and GMT timezone so that date
/// values are parsed as absolutes.
/// - https://tools.ietf.org/html/rfc3339
/// - https://developer.apple.com/library/mac/qa/qa1480/_index.html
/// - https://developer.apple.com/library/ios/documentation/Cocoa/Conceptual/DataFormatting/Articles/dfDateFormatting10_4.html
private static var rfc3339DateFormatter: NSDateFormatter = {
let formatter = NSDateFormatter()
formatter.dateFormat = "yyyy'-'MM'-'dd'T'HH':'mm':'ss'Z'"
formatter.locale = NSLocale(localeIdentifier: "en_US_POSIX")
formatter.timeZone = NSTimeZone(forSecondsFromGMT: 0)
return formatter
}()
static func rfc3339Date(string: String?) -> NSDate? {
guard let string = string else { return nil }
return Time.rfc3339DateFormatter.dateFromString(string)
}
}
| import Foundation
struct Time {
/**
A date formatter for RFC 3339 style timestamps. Uses POSIX locale and GMT timezone so that date values are parsed as absolutes.
- [https://tools.ietf.org/html/rfc3339](https://tools.ietf.org/html/rfc3339)
- [https://developer.apple.com/library/mac/qa/qa1480/_index.html](https://developer.apple.com/library/mac/qa/qa1480/_index.html)
- [https://developer.apple.com/library/ios/documentation/Cocoa/Conceptual/DataFormatting/Articles/dfDateFormatting10_4.html](https://developer.apple.com/library/ios/documentation/Cocoa/Conceptual/DataFormatting/Articles/dfDateFormatting10_4.html)
*/
private static var rfc3339DateFormatter: NSDateFormatter = {
let formatter = NSDateFormatter()
formatter.dateFormat = "yyyy'-'MM'-'dd'T'HH':'mm':'ss'Z'"
formatter.locale = NSLocale(localeIdentifier: "en_US_POSIX")
formatter.timeZone = NSTimeZone(forSecondsFromGMT: 0)
return formatter
}()
/**
Parses RFC 3339 date strings into NSDate
- parameter string: The string representation of the date
- returns: An `NSDate` with a successful parse, otherwise `nil`
*/
static func rfc3339Date(string: String?) -> NSDate? {
guard let string = string else { return nil }
return Time.rfc3339DateFormatter.dateFromString(string)
}
}
| Add documentation for time utils | Add documentation for time utils
| Swift | mit | nerdishbynature/octokit.swift,phatblat/octokit.swift,phatblat/octokit.swift,nerdishbynature/octokit.swift,nerdishbynature/octokit.swift | swift | ## Code Before:
import Foundation
struct Time {
/// A date formatter for RFC 3339 style timestamps. Uses POSIX locale and GMT timezone so that date
/// values are parsed as absolutes.
/// - https://tools.ietf.org/html/rfc3339
/// - https://developer.apple.com/library/mac/qa/qa1480/_index.html
/// - https://developer.apple.com/library/ios/documentation/Cocoa/Conceptual/DataFormatting/Articles/dfDateFormatting10_4.html
private static var rfc3339DateFormatter: NSDateFormatter = {
let formatter = NSDateFormatter()
formatter.dateFormat = "yyyy'-'MM'-'dd'T'HH':'mm':'ss'Z'"
formatter.locale = NSLocale(localeIdentifier: "en_US_POSIX")
formatter.timeZone = NSTimeZone(forSecondsFromGMT: 0)
return formatter
}()
static func rfc3339Date(string: String?) -> NSDate? {
guard let string = string else { return nil }
return Time.rfc3339DateFormatter.dateFromString(string)
}
}
## Instruction:
Add documentation for time utils
## Code After:
import Foundation
struct Time {
/**
A date formatter for RFC 3339 style timestamps. Uses POSIX locale and GMT timezone so that date values are parsed as absolutes.
- [https://tools.ietf.org/html/rfc3339](https://tools.ietf.org/html/rfc3339)
- [https://developer.apple.com/library/mac/qa/qa1480/_index.html](https://developer.apple.com/library/mac/qa/qa1480/_index.html)
- [https://developer.apple.com/library/ios/documentation/Cocoa/Conceptual/DataFormatting/Articles/dfDateFormatting10_4.html](https://developer.apple.com/library/ios/documentation/Cocoa/Conceptual/DataFormatting/Articles/dfDateFormatting10_4.html)
*/
private static var rfc3339DateFormatter: NSDateFormatter = {
let formatter = NSDateFormatter()
formatter.dateFormat = "yyyy'-'MM'-'dd'T'HH':'mm':'ss'Z'"
formatter.locale = NSLocale(localeIdentifier: "en_US_POSIX")
formatter.timeZone = NSTimeZone(forSecondsFromGMT: 0)
return formatter
}()
/**
Parses RFC 3339 date strings into NSDate
- parameter string: The string representation of the date
- returns: An `NSDate` with a successful parse, otherwise `nil`
*/
static func rfc3339Date(string: String?) -> NSDate? {
guard let string = string else { return nil }
return Time.rfc3339DateFormatter.dateFromString(string)
}
}
|
a3f1e58d6fe984287b38154b816189a2ffb7ac8b | src/sap.ui.core/test/sap/ui/core/demokit/sample/Commands/manifest.json | src/sap.ui.core/test/sap/ui/core/demokit/sample/Commands/manifest.json | {
"_version": "1.12.0",
"sap.app": {
"id": "sap.ui.core.sample.Commands",
"type": "application"
},
"sap.ui5": {
"dependencies": {
"libs": {
"sap.ui.core": {},
"sap.m": {}
}
},
"commands": {
"Save": {
"name": "Save",
"icon": "sap-icon://save",
"caption": "test",
"tooltip": "test",
"shortcut": "Ctrl+S"
}
},
"models": {
"viewModel": {
"type": "sap.ui.model.json.JSONModel"
}
},
"rootView": {
"viewName": "sap.ui.core.sample.Commands.Commands",
"type": "XML",
"async": true,
"id": "commands"
},
"config": {
"sample": {
"files": [
"Component.js",
"Commands.view.xml",
"Commands.controller.js",
"manifest.json"
]
}
}
}
} | {
"_version": "1.12.0",
"sap.app": {
"id": "sap.ui.core.sample.Commands",
"type": "application"
},
"sap.ui5": {
"dependencies": {
"libs": {
"sap.ui.core": {},
"sap.m": {}
}
},
"commands": {
"Save": {
"shortcut": "Ctrl+S"
}
},
"models": {
"viewModel": {
"type": "sap.ui.model.json.JSONModel"
}
},
"rootView": {
"viewName": "sap.ui.core.sample.Commands.Commands",
"type": "XML",
"async": true,
"id": "commands"
},
"config": {
"sample": {
"files": [
"Component.js",
"Commands.view.xml",
"Commands.controller.js",
"manifest.json"
]
}
}
}
} | Remove unused properties from sample | [INTERNAL] commands: Remove unused properties from sample
Change-Id: I2c4cd3ebf69ffb9ef064efb49dd93193a88604df
| JSON | apache-2.0 | SAP/openui5,SAP/openui5,SAP/openui5,SAP/openui5 | json | ## Code Before:
{
"_version": "1.12.0",
"sap.app": {
"id": "sap.ui.core.sample.Commands",
"type": "application"
},
"sap.ui5": {
"dependencies": {
"libs": {
"sap.ui.core": {},
"sap.m": {}
}
},
"commands": {
"Save": {
"name": "Save",
"icon": "sap-icon://save",
"caption": "test",
"tooltip": "test",
"shortcut": "Ctrl+S"
}
},
"models": {
"viewModel": {
"type": "sap.ui.model.json.JSONModel"
}
},
"rootView": {
"viewName": "sap.ui.core.sample.Commands.Commands",
"type": "XML",
"async": true,
"id": "commands"
},
"config": {
"sample": {
"files": [
"Component.js",
"Commands.view.xml",
"Commands.controller.js",
"manifest.json"
]
}
}
}
}
## Instruction:
[INTERNAL] commands: Remove unused properties from sample
Change-Id: I2c4cd3ebf69ffb9ef064efb49dd93193a88604df
## Code After:
{
"_version": "1.12.0",
"sap.app": {
"id": "sap.ui.core.sample.Commands",
"type": "application"
},
"sap.ui5": {
"dependencies": {
"libs": {
"sap.ui.core": {},
"sap.m": {}
}
},
"commands": {
"Save": {
"shortcut": "Ctrl+S"
}
},
"models": {
"viewModel": {
"type": "sap.ui.model.json.JSONModel"
}
},
"rootView": {
"viewName": "sap.ui.core.sample.Commands.Commands",
"type": "XML",
"async": true,
"id": "commands"
},
"config": {
"sample": {
"files": [
"Component.js",
"Commands.view.xml",
"Commands.controller.js",
"manifest.json"
]
}
}
}
} |
c4be964b9645261cebb142924288be7fad295a6d | setup.py | setup.py |
from setuptools import find_packages, setup
from platformio import (__author__, __description__, __email__, __license__,
__title__, __url__, __version__)
setup(
name=__title__,
version=__version__,
description=__description__,
long_description=open("README.rst").read(),
author=__author__,
author_email=__email__,
url=__url__,
license=__license__,
install_requires=[
"click",
"colorama",
"pyserial",
"requests",
# "SCons"
],
packages=find_packages(),
package_data={"platformio": ["*.ini"]},
entry_points={
"console_scripts": [
"platformio = platformio.__main__:main"
]
},
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Console",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: C",
"Programming Language :: Python",
"Topic :: Software Development",
"Topic :: Software Development :: Build Tools",
"Topic :: Software Development :: Compilers"
]
)
|
from sys import platform as sysplatform
from setuptools import find_packages, setup
from platformio import (__author__, __description__, __email__, __license__,
__title__, __url__, __version__)
setup(
name=__title__,
version=__version__,
description=__description__,
long_description=open("README.rst").read(),
author=__author__,
author_email=__email__,
url=__url__,
license=__license__,
install_requires=[
"click",
"pyserial",
"requests",
# "SCons"
] + (["colorama"] if sysplatform.startswith("win") else []),
packages=find_packages(),
package_data={"platformio": ["*.ini"]},
entry_points={
"console_scripts": [
"platformio = platformio.__main__:main"
]
},
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Console",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: C",
"Programming Language :: Python",
"Topic :: Software Development",
"Topic :: Software Development :: Build Tools",
"Topic :: Software Development :: Compilers"
]
)
| Install "colorama" if windows platform | Install "colorama" if windows platform
| Python | apache-2.0 | platformio/platformio-core,mplewis/platformio,eiginn/platformio,awong1900/platformio,dkuku/platformio,bkudria/platformio,bkudria/platformio,aphelps/platformio,TimJay/platformio,mseroczynski/platformio,awong1900/platformio,awong1900/platformio,platformio/platformio-core,bkudria/platformio,jrobeson/platformio,platformio/platformio,aphelps/platformio,atyenoria/platformio,jrobeson/platformio,bkudria/platformio,aphelps/platformio,TimJay/platformio,jrobeson/platformio,mcanthony/platformio,jrobeson/platformio,TimJay/platformio,ZachMassia/platformio,valeros/platformio,aphelps/platformio,TimJay/platformio,TimJay/platformio | python | ## Code Before:
from setuptools import find_packages, setup
from platformio import (__author__, __description__, __email__, __license__,
__title__, __url__, __version__)
setup(
name=__title__,
version=__version__,
description=__description__,
long_description=open("README.rst").read(),
author=__author__,
author_email=__email__,
url=__url__,
license=__license__,
install_requires=[
"click",
"colorama",
"pyserial",
"requests",
# "SCons"
],
packages=find_packages(),
package_data={"platformio": ["*.ini"]},
entry_points={
"console_scripts": [
"platformio = platformio.__main__:main"
]
},
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Console",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: C",
"Programming Language :: Python",
"Topic :: Software Development",
"Topic :: Software Development :: Build Tools",
"Topic :: Software Development :: Compilers"
]
)
## Instruction:
Install "colorama" if windows platform
## Code After:
from sys import platform as sysplatform
from setuptools import find_packages, setup
from platformio import (__author__, __description__, __email__, __license__,
__title__, __url__, __version__)
setup(
name=__title__,
version=__version__,
description=__description__,
long_description=open("README.rst").read(),
author=__author__,
author_email=__email__,
url=__url__,
license=__license__,
install_requires=[
"click",
"pyserial",
"requests",
# "SCons"
] + (["colorama"] if sysplatform.startswith("win") else []),
packages=find_packages(),
package_data={"platformio": ["*.ini"]},
entry_points={
"console_scripts": [
"platformio = platformio.__main__:main"
]
},
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Console",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: C",
"Programming Language :: Python",
"Topic :: Software Development",
"Topic :: Software Development :: Build Tools",
"Topic :: Software Development :: Compilers"
]
)
|
1e1d46ae7f7250fe068c6048ba2323feee2f6277 | documentation/assertions/any/with-xhr-mocked-out.md | documentation/assertions/any/with-xhr-mocked-out.md | Mock out uses of `XMLHttpRequest` within the browser.
Let's say we've got some XHR code. In the example below we'll use
[unexpected-http](https://github.com/unexpectedjs/unexpected-http/)
to give us a nice syntax for issuing XHR requests, but in practice you'd be
wrapping your client side side e.g. `jQuery.ajax()` calls.
```js#evaluate:false
expect.use(require('unexpected-http'));
```
Mock Responses
--------------
Unexpected-mXHR allows declaratively specifying responses to return for XHR
requests as follows:
```js#evaluate:false
describe('the basics', function () {
return expect('http://www.google.com/', 'with xhr mocked out', {
request: 'GET /',
response: {
statusCode: 200,
headers: {
'Content-Type': 'text/html; charset=UTF-8'
},
body: '<!DOCTYPE html>\n<html></html>'
}
}, 'to yield response', {
statusCode: 200,
headers: {
'Content-Type': 'text/html; charset=UTF-8'
},
body: '<!DOCTYPE html>\n<html></html>'
});
});
```
As with `unexpected-mitm`, the request we expected to receive can be
| Mock out uses of `XMLHttpRequest` within the browser.
Let's say we've got some XHR code. In the example below we'll use
[unexpected-http](https://github.com/unexpectedjs/unexpected-http/)
to give us a nice syntax for issuing XHR requests, but in practice you'd be
wrapping your client side side e.g. `jQuery.ajax()` calls.
```js#evaluate:false
expect.use(require('unexpected-http'));
```
Mock Responses
--------------
Unexpected-mXHR allows declaratively specifying responses to return for XHR
requests as follows:
```js#evaluate:false
describe('the basics', function () {
return expect('http://www.google.com/', 'with xhr mocked out', {
request: 'GET /',
response: {
statusCode: 200,
headers: {
'Content-Type': 'text/html; charset=UTF-8'
},
body: '<!DOCTYPE html>\n<html></html>'
}
}, 'to yield response', {
statusCode: 200,
headers: {
'Content-Type': 'text/html; charset=UTF-8'
},
body: '<!DOCTYPE html>\n<html></html>'
});
});
```
As with `unexpected-mitm`, the request we expected to receive can be checked
against expectations and having succeeded the response we define is returned.
| Complete sentence in the documentation. | Complete sentence in the documentation.
| Markdown | bsd-3-clause | alexjeffburke/unexpected-mxhr,alexjeffburke/unexpected-mxhr | markdown | ## Code Before:
Mock out uses of `XMLHttpRequest` within the browser.
Let's say we've got some XHR code. In the example below we'll use
[unexpected-http](https://github.com/unexpectedjs/unexpected-http/)
to give us a nice syntax for issuing XHR requests, but in practice you'd be
wrapping your client side side e.g. `jQuery.ajax()` calls.
```js#evaluate:false
expect.use(require('unexpected-http'));
```
Mock Responses
--------------
Unexpected-mXHR allows declaratively specifying responses to return for XHR
requests as follows:
```js#evaluate:false
describe('the basics', function () {
return expect('http://www.google.com/', 'with xhr mocked out', {
request: 'GET /',
response: {
statusCode: 200,
headers: {
'Content-Type': 'text/html; charset=UTF-8'
},
body: '<!DOCTYPE html>\n<html></html>'
}
}, 'to yield response', {
statusCode: 200,
headers: {
'Content-Type': 'text/html; charset=UTF-8'
},
body: '<!DOCTYPE html>\n<html></html>'
});
});
```
As with `unexpected-mitm`, the request we expected to receive can be
## Instruction:
Complete sentence in the documentation.
## Code After:
Mock out uses of `XMLHttpRequest` within the browser.
Let's say we've got some XHR code. In the example below we'll use
[unexpected-http](https://github.com/unexpectedjs/unexpected-http/)
to give us a nice syntax for issuing XHR requests, but in practice you'd be
wrapping your client side side e.g. `jQuery.ajax()` calls.
```js#evaluate:false
expect.use(require('unexpected-http'));
```
Mock Responses
--------------
Unexpected-mXHR allows declaratively specifying responses to return for XHR
requests as follows:
```js#evaluate:false
describe('the basics', function () {
return expect('http://www.google.com/', 'with xhr mocked out', {
request: 'GET /',
response: {
statusCode: 200,
headers: {
'Content-Type': 'text/html; charset=UTF-8'
},
body: '<!DOCTYPE html>\n<html></html>'
}
}, 'to yield response', {
statusCode: 200,
headers: {
'Content-Type': 'text/html; charset=UTF-8'
},
body: '<!DOCTYPE html>\n<html></html>'
});
});
```
As with `unexpected-mitm`, the request we expected to receive can be checked
against expectations and having succeeded the response we define is returned.
|
ae43fd8cacc01b74ddaa5718a41d58387bd35418 | .travis.yml | .travis.yml | language: node_js
node_js:
- "0.10"
before_script:
- "IOJS_VERSION=v1.0.1; wget https://iojs.org/dist/${IOJS_VERSION}/iojs-${IOJS_VERSION}-linux-x64.tar.xz && tar xvfJ iojs-${IOJS_VERSION}-linux-x64.tar.xz && sudo mv iojs-${IOJS_VERSION}-linux-x64/bin/iojs /usr/local/bin"
script:
- "npm run build && npm test"
- "alias node=iojs; npm run build && npm test"
after_script:
- "istanbul cover --verbose --dir coverage node_modules/.bin/_mocha tests/tests.js -- -u exports -R spec && cat coverage/lcov.info | coveralls; rm -rf coverage/lcov*"
| language: node_js
node_js:
- "0.10"
- "0.12"
- "iojs"
script:
- "npm run build && npm test"
after_script:
- "istanbul cover --verbose --dir coverage node_modules/.bin/_mocha tests/tests.js -- -u exports -R spec && cat coverage/lcov.info | coveralls; rm -rf coverage/lcov*"
| Test in io.js and Node.js v0.12.x | Test in io.js and Node.js v0.12.x
| YAML | mit | mathiasbynens/regexpu | yaml | ## Code Before:
language: node_js
node_js:
- "0.10"
before_script:
- "IOJS_VERSION=v1.0.1; wget https://iojs.org/dist/${IOJS_VERSION}/iojs-${IOJS_VERSION}-linux-x64.tar.xz && tar xvfJ iojs-${IOJS_VERSION}-linux-x64.tar.xz && sudo mv iojs-${IOJS_VERSION}-linux-x64/bin/iojs /usr/local/bin"
script:
- "npm run build && npm test"
- "alias node=iojs; npm run build && npm test"
after_script:
- "istanbul cover --verbose --dir coverage node_modules/.bin/_mocha tests/tests.js -- -u exports -R spec && cat coverage/lcov.info | coveralls; rm -rf coverage/lcov*"
## Instruction:
Test in io.js and Node.js v0.12.x
## Code After:
language: node_js
node_js:
- "0.10"
- "0.12"
- "iojs"
script:
- "npm run build && npm test"
after_script:
- "istanbul cover --verbose --dir coverage node_modules/.bin/_mocha tests/tests.js -- -u exports -R spec && cat coverage/lcov.info | coveralls; rm -rf coverage/lcov*"
|
83f97dc8204133141365908c7ff13c3973f427e6 | tools/llvm-xray/llvm-xray.cc | tools/llvm-xray/llvm-xray.cc | //===- llvm-xray.cc - XRay Tool Main Program ------------------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the main entry point for the suite of XRay tools. All
// additional functionality are implemented as subcommands.
//
//===----------------------------------------------------------------------===//
//
// Basic usage:
//
// llvm-xray [options] <subcommand> [subcommand-specific options]
//
#include "xray-registry.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/FileSystem.h"
#include "llvm/Support/raw_ostream.h"
#include <unistd.h>
using namespace llvm;
using namespace llvm::xray;
int main(int argc, char *argv[]) {
cl::ParseCommandLineOptions(argc, argv,
"XRay Tools\n\n"
" This program consolidates multiple XRay trace "
"processing tools for convenient access.\n");
for (auto *SC : cl::getRegisteredSubcommands()) {
if (*SC)
if (auto C = dispatch(SC)) {
ExitOnError("llvm-xray: ")(C());
return 0;
}
}
cl::PrintHelpMessage(false, true);
}
| //===- llvm-xray.cc - XRay Tool Main Program ------------------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the main entry point for the suite of XRay tools. All
// additional functionality are implemented as subcommands.
//
//===----------------------------------------------------------------------===//
//
// Basic usage:
//
// llvm-xray [options] <subcommand> [subcommand-specific options]
//
#include "xray-registry.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/FileSystem.h"
#include "llvm/Support/raw_ostream.h"
using namespace llvm;
using namespace llvm::xray;
int main(int argc, char *argv[]) {
cl::ParseCommandLineOptions(argc, argv,
"XRay Tools\n\n"
" This program consolidates multiple XRay trace "
"processing tools for convenient access.\n");
for (auto *SC : cl::getRegisteredSubcommands()) {
if (*SC)
if (auto C = dispatch(SC)) {
ExitOnError("llvm-xray: ")(C());
return 0;
}
}
cl::PrintHelpMessage(false, true);
}
| Remove unnecessary include of <unistd.h> | [XRay] Remove unnecessary include of <unistd.h>
git-svn-id: 0ff597fd157e6f4fc38580e8d64ab130330d2411@285171 91177308-0d34-0410-b5e6-96231b3b80d8
| C++ | apache-2.0 | llvm-mirror/llvm,apple/swift-llvm,apple/swift-llvm,GPUOpen-Drivers/llvm,apple/swift-llvm,apple/swift-llvm,GPUOpen-Drivers/llvm,GPUOpen-Drivers/llvm,apple/swift-llvm,GPUOpen-Drivers/llvm,apple/swift-llvm,apple/swift-llvm,llvm-mirror/llvm,llvm-mirror/llvm,apple/swift-llvm,GPUOpen-Drivers/llvm,llvm-mirror/llvm,llvm-mirror/llvm,llvm-mirror/llvm,llvm-mirror/llvm,GPUOpen-Drivers/llvm,llvm-mirror/llvm,GPUOpen-Drivers/llvm,llvm-mirror/llvm,GPUOpen-Drivers/llvm | c++ | ## Code Before:
//===- llvm-xray.cc - XRay Tool Main Program ------------------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the main entry point for the suite of XRay tools. All
// additional functionality are implemented as subcommands.
//
//===----------------------------------------------------------------------===//
//
// Basic usage:
//
// llvm-xray [options] <subcommand> [subcommand-specific options]
//
#include "xray-registry.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/FileSystem.h"
#include "llvm/Support/raw_ostream.h"
#include <unistd.h>
using namespace llvm;
using namespace llvm::xray;
int main(int argc, char *argv[]) {
cl::ParseCommandLineOptions(argc, argv,
"XRay Tools\n\n"
" This program consolidates multiple XRay trace "
"processing tools for convenient access.\n");
for (auto *SC : cl::getRegisteredSubcommands()) {
if (*SC)
if (auto C = dispatch(SC)) {
ExitOnError("llvm-xray: ")(C());
return 0;
}
}
cl::PrintHelpMessage(false, true);
}
## Instruction:
[XRay] Remove unnecessary include of <unistd.h>
git-svn-id: 0ff597fd157e6f4fc38580e8d64ab130330d2411@285171 91177308-0d34-0410-b5e6-96231b3b80d8
## Code After:
//===- llvm-xray.cc - XRay Tool Main Program ------------------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the main entry point for the suite of XRay tools. All
// additional functionality are implemented as subcommands.
//
//===----------------------------------------------------------------------===//
//
// Basic usage:
//
// llvm-xray [options] <subcommand> [subcommand-specific options]
//
#include "xray-registry.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/FileSystem.h"
#include "llvm/Support/raw_ostream.h"
using namespace llvm;
using namespace llvm::xray;
int main(int argc, char *argv[]) {
cl::ParseCommandLineOptions(argc, argv,
"XRay Tools\n\n"
" This program consolidates multiple XRay trace "
"processing tools for convenient access.\n");
for (auto *SC : cl::getRegisteredSubcommands()) {
if (*SC)
if (auto C = dispatch(SC)) {
ExitOnError("llvm-xray: ")(C());
return 0;
}
}
cl::PrintHelpMessage(false, true);
}
|
3375cd1c123c1748a60aa99a642d9c6e662e65e1 | examples/native/hello/__tests__/App.js | examples/native/hello/__tests__/App.js | import 'react-native';
import React from 'react';
import App from '../App';
// Note: test renderer must be required after react-native.
import renderer from 'react-test-renderer';
it('renders correctly', () => {
const tree = renderer.create(
<App />
);
});
| /* eslint-env jest */
import 'react-native';
import React from 'react';
import App from '../App';
// Note: test renderer must be required after react-native.
import renderer from 'react-test-renderer';
it('renders correctly', () => {
const tree = renderer.create(
<App />
);
});
| Fix lint for react-native example | Fix lint for react-native example
| JavaScript | mit | pH200/cycle-react | javascript | ## Code Before:
import 'react-native';
import React from 'react';
import App from '../App';
// Note: test renderer must be required after react-native.
import renderer from 'react-test-renderer';
it('renders correctly', () => {
const tree = renderer.create(
<App />
);
});
## Instruction:
Fix lint for react-native example
## Code After:
/* eslint-env jest */
import 'react-native';
import React from 'react';
import App from '../App';
// Note: test renderer must be required after react-native.
import renderer from 'react-test-renderer';
it('renders correctly', () => {
const tree = renderer.create(
<App />
);
});
|
b47017282e23be837d469378e5d1ac307f8d99c1 | client/app/scripts/directive/projectQuickView.js | client/app/scripts/directive/projectQuickView.js | angular
.module('app')
.directive('projectQuickView', function() {
return {
restrict: 'EA',
replace: false,
templateUrl: 'app/templates/partials/projectQuickView.html',
link: function(scope, element, attrs) {
scope.completed = attrs.completed;
scope.hasDate = function() {
return scope.item.end_date && scope.item.end_date !== null;
};
scope.isContentFromOldSite = function(item) {
return scope.item.end_date == "2012-10-20T04:00:00.000Z";
};
scope.completedStamp = function(item) {
return scope.completed && !scope.isContentFromOldSite(item);
};
}
};
});
| angular
.module('app')
.directive('projectQuickView', function() {
return {
restrict: 'EA',
replace: false,
templateUrl: 'app/templates/partials/projectQuickView.html',
link: function(scope, element, attrs) {
scope.completed = attrs.completed;
scope.hasDate = function() {
return scope.item && scope.item.end_date && scope.item.end_date !== null;
};
scope.isContentFromOldSite = function(item) {
return item && item.end_date == "2012-10-20T04:00:00.000Z";
};
scope.completedStamp = function(item) {
return scope.completed && !scope.isContentFromOldSite(item);
};
}
};
});
| Make a check if end_date exists on the project in the hasDate function | Make a check if end_date exists on the project in the hasDate function
| JavaScript | mit | brettshollenberger/rootstrikers,brettshollenberger/rootstrikers | javascript | ## Code Before:
angular
.module('app')
.directive('projectQuickView', function() {
return {
restrict: 'EA',
replace: false,
templateUrl: 'app/templates/partials/projectQuickView.html',
link: function(scope, element, attrs) {
scope.completed = attrs.completed;
scope.hasDate = function() {
return scope.item.end_date && scope.item.end_date !== null;
};
scope.isContentFromOldSite = function(item) {
return scope.item.end_date == "2012-10-20T04:00:00.000Z";
};
scope.completedStamp = function(item) {
return scope.completed && !scope.isContentFromOldSite(item);
};
}
};
});
## Instruction:
Make a check if end_date exists on the project in the hasDate function
## Code After:
angular
.module('app')
.directive('projectQuickView', function() {
return {
restrict: 'EA',
replace: false,
templateUrl: 'app/templates/partials/projectQuickView.html',
link: function(scope, element, attrs) {
scope.completed = attrs.completed;
scope.hasDate = function() {
return scope.item && scope.item.end_date && scope.item.end_date !== null;
};
scope.isContentFromOldSite = function(item) {
return item && item.end_date == "2012-10-20T04:00:00.000Z";
};
scope.completedStamp = function(item) {
return scope.completed && !scope.isContentFromOldSite(item);
};
}
};
});
|
f7d62d32bc1314658e1f5a32f7279840a7ccd57d | lib/supervisord_templates/app.conf.erb | lib/supervisord_templates/app.conf.erb | [unix_http_server]
file=/srv/stonepath-example/supervisor.sock
[supervisord]
logfile=/srv/stonepath-example/supervisord.log
logfile_maxbytes=50MB
logfile_backups=10
loglevel=error
pidfile=/srv/stonepath-example/supervisord.pid
nodaemon=false
minfds=1024
minprocs=200
[rpcinterface:supervisor]
supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface
[supervisorctl]
serverurl=unix:///tmp/supervisor_stonepath-example.sock
<%
app_names = []
engine.each_process do |name, process|
1.upto(engine.formation[name]) do |num|
port = engine.port_for(process, num)
full_name = "#{app}-#{name}-#{num}"
environment = engine.env.merge("PORT" => port.to_s).map do |key, value|
"#{key}=#{shell_quote(value)}"
end
app_names << full_name
%>
[program:<%= full_name %>]
command=<%= process.command %>
autostart=true
autorestart=true
stopsignal=QUIT
log=<%= log %>
directory=<%= engine.root %>
environment=<%= environment.join(',') %><%
end
end
%>
[group:<%= app %>]
programs=<%= app_names.join(',') %>
| [unix_http_server]
file=/srv/fourth-year-project-system/supervisor.sock
[supervisord]
logfile=/srv/fourth-year-project-system/supervisord.log
logfile_maxbytes=50MB
logfile_backups=10
loglevel=error
pidfile=/srv/fourth-year-project-system/supervisord.pid
nodaemon=false
minfds=1024
minprocs=200
[rpcinterface:supervisor]
supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface
[supervisorctl]
serverurl=unix:///tmp/supervisor_fourth-year-project-system.sock
<%
app_names = []
engine.each_process do |name, process|
1.upto(engine.formation[name]) do |num|
port = engine.port_for(process, num)
full_name = "#{app}-#{name}-#{num}"
environment = engine.env.merge("PORT" => port.to_s).map do |key, value|
"#{key}=#{shell_quote(value)}"
end
app_names << full_name
%>
[program:<%= full_name %>]
command=<%= process.command %>
autostart=true
autorestart=true
stopsignal=QUIT
log=<%= log %>
directory=<%= engine.root %>
environment=<%= environment.join(',') %><%
end
end
%>
[group:<%= app %>]
programs=<%= app_names.join(',') %>
| Use the correct application name | supervisor_templates: Use the correct application name
| HTML+ERB | mit | WorkflowsOnRails/fourth-year-project-system,WorkflowsOnRails/fourth-year-project-system | html+erb | ## Code Before:
[unix_http_server]
file=/srv/stonepath-example/supervisor.sock
[supervisord]
logfile=/srv/stonepath-example/supervisord.log
logfile_maxbytes=50MB
logfile_backups=10
loglevel=error
pidfile=/srv/stonepath-example/supervisord.pid
nodaemon=false
minfds=1024
minprocs=200
[rpcinterface:supervisor]
supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface
[supervisorctl]
serverurl=unix:///tmp/supervisor_stonepath-example.sock
<%
app_names = []
engine.each_process do |name, process|
1.upto(engine.formation[name]) do |num|
port = engine.port_for(process, num)
full_name = "#{app}-#{name}-#{num}"
environment = engine.env.merge("PORT" => port.to_s).map do |key, value|
"#{key}=#{shell_quote(value)}"
end
app_names << full_name
%>
[program:<%= full_name %>]
command=<%= process.command %>
autostart=true
autorestart=true
stopsignal=QUIT
log=<%= log %>
directory=<%= engine.root %>
environment=<%= environment.join(',') %><%
end
end
%>
[group:<%= app %>]
programs=<%= app_names.join(',') %>
## Instruction:
supervisor_templates: Use the correct application name
## Code After:
[unix_http_server]
file=/srv/fourth-year-project-system/supervisor.sock
[supervisord]
logfile=/srv/fourth-year-project-system/supervisord.log
logfile_maxbytes=50MB
logfile_backups=10
loglevel=error
pidfile=/srv/fourth-year-project-system/supervisord.pid
nodaemon=false
minfds=1024
minprocs=200
[rpcinterface:supervisor]
supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface
[supervisorctl]
serverurl=unix:///tmp/supervisor_fourth-year-project-system.sock
<%
app_names = []
engine.each_process do |name, process|
1.upto(engine.formation[name]) do |num|
port = engine.port_for(process, num)
full_name = "#{app}-#{name}-#{num}"
environment = engine.env.merge("PORT" => port.to_s).map do |key, value|
"#{key}=#{shell_quote(value)}"
end
app_names << full_name
%>
[program:<%= full_name %>]
command=<%= process.command %>
autostart=true
autorestart=true
stopsignal=QUIT
log=<%= log %>
directory=<%= engine.root %>
environment=<%= environment.join(',') %><%
end
end
%>
[group:<%= app %>]
programs=<%= app_names.join(',') %>
|
0d413bd7372143b40dd463e8856b950b2f2e5af9 | types/react-big-calendar/lib/addons/dragAndDrop.d.ts | types/react-big-calendar/lib/addons/dragAndDrop.d.ts | import BigCalendar, { BigCalendarProps, Event, stringOrDate } from '../../index';
import React = require('react');
interface withDragAndDropProps<TEvent> {
onEventDrop?: (args: { event: TEvent, start: stringOrDate, end: stringOrDate, allDay: boolean }) => void;
onEventResize?: (args: { event: TEvent, start: stringOrDate, end: stringOrDate, allDay: boolean }) => void;
resizable?: boolean;
}
declare class DragAndDropCalendar<TEvent extends Event = Event, TResource extends object = object>
extends React.Component<BigCalendarProps<TEvent, TResource> & withDragAndDropProps<TEvent>> {}
declare function withDragAndDrop(calendar: typeof BigCalendar): typeof DragAndDropCalendar;
export = withDragAndDrop;
| import BigCalendar, { BigCalendarProps, Event, stringOrDate } from '../../index';
import React from 'react';
interface withDragAndDropProps<TEvent> {
onEventDrop?: (args: { event: TEvent, start: stringOrDate, end: stringOrDate, allDay: boolean }) => void;
onEventResize?: (args: { event: TEvent, start: stringOrDate, end: stringOrDate, allDay: boolean }) => void;
resizable?: boolean;
}
declare class DragAndDropCalendar<TEvent extends Event = Event, TResource extends object = object>
extends React.Component<BigCalendarProps<TEvent, TResource> & withDragAndDropProps<TEvent>> {}
declare function withDragAndDrop(calendar: typeof BigCalendar): typeof DragAndDropCalendar;
export = withDragAndDrop;
| Use ES import to see if it works | Use ES import to see if it works | TypeScript | mit | georgemarshall/DefinitelyTyped,borisyankov/DefinitelyTyped,mcliment/DefinitelyTyped,dsebastien/DefinitelyTyped,georgemarshall/DefinitelyTyped,markogresak/DefinitelyTyped,dsebastien/DefinitelyTyped,georgemarshall/DefinitelyTyped,georgemarshall/DefinitelyTyped,borisyankov/DefinitelyTyped | typescript | ## Code Before:
import BigCalendar, { BigCalendarProps, Event, stringOrDate } from '../../index';
import React = require('react');
interface withDragAndDropProps<TEvent> {
onEventDrop?: (args: { event: TEvent, start: stringOrDate, end: stringOrDate, allDay: boolean }) => void;
onEventResize?: (args: { event: TEvent, start: stringOrDate, end: stringOrDate, allDay: boolean }) => void;
resizable?: boolean;
}
declare class DragAndDropCalendar<TEvent extends Event = Event, TResource extends object = object>
extends React.Component<BigCalendarProps<TEvent, TResource> & withDragAndDropProps<TEvent>> {}
declare function withDragAndDrop(calendar: typeof BigCalendar): typeof DragAndDropCalendar;
export = withDragAndDrop;
## Instruction:
Use ES import to see if it works
## Code After:
import BigCalendar, { BigCalendarProps, Event, stringOrDate } from '../../index';
import React from 'react';
interface withDragAndDropProps<TEvent> {
onEventDrop?: (args: { event: TEvent, start: stringOrDate, end: stringOrDate, allDay: boolean }) => void;
onEventResize?: (args: { event: TEvent, start: stringOrDate, end: stringOrDate, allDay: boolean }) => void;
resizable?: boolean;
}
declare class DragAndDropCalendar<TEvent extends Event = Event, TResource extends object = object>
extends React.Component<BigCalendarProps<TEvent, TResource> & withDragAndDropProps<TEvent>> {}
declare function withDragAndDrop(calendar: typeof BigCalendar): typeof DragAndDropCalendar;
export = withDragAndDrop;
|
304ea4aa98818f07bb4a42eb87108f5692da3ee7 | css/style.css | css/style.css | body {
color: white;
}
.header {
margin-top: 50px;
}
.header-img {
margin: 150, 1500, 150, 1500;
width: 400px;
height: 250px;
border-radius: 4px;
}
.demo-layout-transparent {
background: url('../img/background.jpg') center / cover;
}
.demo-layout-transparent .mdl-layout__header,
.demo-layout-transparent .mdl-layout__drawer-button {
color: white;
}
.repo.mdl-card {
margin-top: 50px;
}
@media only screen and (min-width: 350px) {
.header-img {
width: 350px !important;
}
}
.repo > .mdl-card__title {
color: #fff;
height: 176px;
background: url('../img/background.jpg') center / cover;
}
.repo > .mdl-card__menu {
color: #fff;
} | body {
color: white;
}
.header {
margin-top: 50px;
}
.header-img {
margin: 150, 1500, 150, 1500;
width: 400px;
height: 250px;
border-radius: 4px;
}
.demo-layout-transparent {
background: url('../img/background.jpg') center / cover;
}
.demo-layout-transparent .mdl-layout__header,
.demo-layout-transparent .mdl-layout__drawer-button {
color: white;
}
.repo.mdl-card {
margin-top: 50px;
}
@media only screen and (min-width: 320px) {
.header-img {
width: 320px !important;
}
}
.repo > .mdl-card__title {
color: #fff;
height: 176px;
background: url('../img/background.jpg') center / cover;
}
.repo > .mdl-card__menu {
color: #fff;
}
| Set min-width as 320px rather than 350px | Set min-width as 320px rather than 350px
| CSS | mit | christoga/christoga.github.io,christoga/christoga.github.io | css | ## Code Before:
body {
color: white;
}
.header {
margin-top: 50px;
}
.header-img {
margin: 150, 1500, 150, 1500;
width: 400px;
height: 250px;
border-radius: 4px;
}
.demo-layout-transparent {
background: url('../img/background.jpg') center / cover;
}
.demo-layout-transparent .mdl-layout__header,
.demo-layout-transparent .mdl-layout__drawer-button {
color: white;
}
.repo.mdl-card {
margin-top: 50px;
}
@media only screen and (min-width: 350px) {
.header-img {
width: 350px !important;
}
}
.repo > .mdl-card__title {
color: #fff;
height: 176px;
background: url('../img/background.jpg') center / cover;
}
.repo > .mdl-card__menu {
color: #fff;
}
## Instruction:
Set min-width as 320px rather than 350px
## Code After:
body {
color: white;
}
.header {
margin-top: 50px;
}
.header-img {
margin: 150, 1500, 150, 1500;
width: 400px;
height: 250px;
border-radius: 4px;
}
.demo-layout-transparent {
background: url('../img/background.jpg') center / cover;
}
.demo-layout-transparent .mdl-layout__header,
.demo-layout-transparent .mdl-layout__drawer-button {
color: white;
}
.repo.mdl-card {
margin-top: 50px;
}
@media only screen and (min-width: 320px) {
.header-img {
width: 320px !important;
}
}
.repo > .mdl-card__title {
color: #fff;
height: 176px;
background: url('../img/background.jpg') center / cover;
}
.repo > .mdl-card__menu {
color: #fff;
}
|
a6cb8d3c2d79b609a6d5d0550af57aa2b9328f7f | mopidy_vkontakte/actor.py | mopidy_vkontakte/actor.py | from __future__ import unicode_literals
import logging
import pykka
from mopidy.backends import base
from .library import VKLibraryProvider
from .playlists import VKPlaylistsProvider
from .session import VKSession
logger = logging.getLogger('mopidy.backends.vkontakte.actor')
class VKBackend(pykka.ThreadingActor, base.Backend):
def __init__(self, config, audio):
super(VKBackend, self).__init__()
self.config = config
self.session = VKSession(config=self.config)
self.library = VKLibraryProvider(backend=self)
self.playback = VKPlaybackProvider(audio=audio, backend=self)
self.playlists = VKPlaylistsProvider(backend=self)
self.uri_schemes = ['vkontakte']
class VKPlaybackProvider(base.BasePlaybackProvider):
def play(self, track):
return super(VKPlaybackProvider, self).play(track)
| from __future__ import unicode_literals
import logging
import pykka
from mopidy.backends import base
from .library import VKLibraryProvider
from .playlists import VKPlaylistsProvider
from .session import VKSession
logger = logging.getLogger('mopidy.backends.vkontakte.actor')
class VKBackend(pykka.ThreadingActor, base.Backend):
def __init__(self, config, audio):
super(VKBackend, self).__init__()
self.config = config
self.session = VKSession(config=self.config)
self.library = VKLibraryProvider(backend=self)
self.playback = base.BasePlaybackProvider(audio=audio, backend=self)
self.playlists = VKPlaylistsProvider(backend=self)
self.uri_schemes = ['vkontakte']
| Remove PlaybackProvider that does nothing | Remove PlaybackProvider that does nothing
| Python | apache-2.0 | sibuser/mopidy-vkontakte | python | ## Code Before:
from __future__ import unicode_literals
import logging
import pykka
from mopidy.backends import base
from .library import VKLibraryProvider
from .playlists import VKPlaylistsProvider
from .session import VKSession
logger = logging.getLogger('mopidy.backends.vkontakte.actor')
class VKBackend(pykka.ThreadingActor, base.Backend):
def __init__(self, config, audio):
super(VKBackend, self).__init__()
self.config = config
self.session = VKSession(config=self.config)
self.library = VKLibraryProvider(backend=self)
self.playback = VKPlaybackProvider(audio=audio, backend=self)
self.playlists = VKPlaylistsProvider(backend=self)
self.uri_schemes = ['vkontakte']
class VKPlaybackProvider(base.BasePlaybackProvider):
def play(self, track):
return super(VKPlaybackProvider, self).play(track)
## Instruction:
Remove PlaybackProvider that does nothing
## Code After:
from __future__ import unicode_literals
import logging
import pykka
from mopidy.backends import base
from .library import VKLibraryProvider
from .playlists import VKPlaylistsProvider
from .session import VKSession
logger = logging.getLogger('mopidy.backends.vkontakte.actor')
class VKBackend(pykka.ThreadingActor, base.Backend):
def __init__(self, config, audio):
super(VKBackend, self).__init__()
self.config = config
self.session = VKSession(config=self.config)
self.library = VKLibraryProvider(backend=self)
self.playback = base.BasePlaybackProvider(audio=audio, backend=self)
self.playlists = VKPlaylistsProvider(backend=self)
self.uri_schemes = ['vkontakte']
|
d57dd1a7c377c94d2a5880ef271936d642cb06a5 | .zuul.yaml | .zuul.yaml | - project:
templates:
- check-requirements
- lib-forward-testing-python3
- openstack-lower-constraints-jobs
- openstack-python3-wallaby-jobs
- periodic-stable-jobs
- publish-openstack-docs-pti
- release-notes-jobs-python3
| - job:
name: cross-nova-tox-functional
parent: openstack-tox
description: |
Run cross-project functional tests on nova.
vars:
zuul_work_dir: src/opendev.org/openstack/nova
tox_envlist: functional
required-projects:
- openstack/nova
- openstack/oslo.policy
- job:
name: cross-nova-tox-py38
parent: openstack-tox
description: |
Run cross-project unit tests on nova.
vars:
zuul_work_dir: src/opendev.org/openstack/nova
tox_envlist: py38
required-projects:
- openstack/nova
- openstack/oslo.policy
- job:
name: cross-neutron-tox-py38
parent: openstack-tox
description: |
Run cross-project unit tests on neutron.
vars:
zuul_work_dir: src/opendev.org/openstack/neutron
tox_envlist: py38
required-projects:
- openstack/neutron
- openstack/oslo.policy
- project:
templates:
- check-requirements
- lib-forward-testing-python3
- openstack-lower-constraints-jobs
- openstack-python3-wallaby-jobs
- periodic-stable-jobs
- publish-openstack-docs-pti
- release-notes-jobs-python3
check:
jobs:
- cross-nova-tox-py38
- cross-nova-tox-functional
- cross-neutron-tox-py38
gate:
jobs:
- cross-nova-tox-py38
- cross-nova-tox-functional
- cross-neutron-tox-py38
| Add nova/neutron project unit/functional tests job in gate | Add nova/neutron project unit/functional tests job in gate
We do not test the olso policy master code changes with
services unit or functional tests. Tempest job initialize
policy once and run with default rules so it will not be able
to catch all the scenario what unit or functional job does.
They initialize or override policy in parallel and sometime
that help to find the issue like
- https://bugs.launchpad.net/oslo.policy/+bug/1914095
Also this will help us to avoid any new breaking release which
is detected at the time when requirement u-c are updated
Example: https://review.opendev.org/c/openstack/requirements/+/773779
Currently this commit adds only nova & neutron testing but we can add
more service testing later.
Change-Id: Ic54b229a4bf1325adac2cab747bcd19b9f8ecb01
| YAML | apache-2.0 | openstack/oslo.policy | yaml | ## Code Before:
- project:
templates:
- check-requirements
- lib-forward-testing-python3
- openstack-lower-constraints-jobs
- openstack-python3-wallaby-jobs
- periodic-stable-jobs
- publish-openstack-docs-pti
- release-notes-jobs-python3
## Instruction:
Add nova/neutron project unit/functional tests job in gate
We do not test the olso policy master code changes with
services unit or functional tests. Tempest job initialize
policy once and run with default rules so it will not be able
to catch all the scenario what unit or functional job does.
They initialize or override policy in parallel and sometime
that help to find the issue like
- https://bugs.launchpad.net/oslo.policy/+bug/1914095
Also this will help us to avoid any new breaking release which
is detected at the time when requirement u-c are updated
Example: https://review.opendev.org/c/openstack/requirements/+/773779
Currently this commit adds only nova & neutron testing but we can add
more service testing later.
Change-Id: Ic54b229a4bf1325adac2cab747bcd19b9f8ecb01
## Code After:
- job:
name: cross-nova-tox-functional
parent: openstack-tox
description: |
Run cross-project functional tests on nova.
vars:
zuul_work_dir: src/opendev.org/openstack/nova
tox_envlist: functional
required-projects:
- openstack/nova
- openstack/oslo.policy
- job:
name: cross-nova-tox-py38
parent: openstack-tox
description: |
Run cross-project unit tests on nova.
vars:
zuul_work_dir: src/opendev.org/openstack/nova
tox_envlist: py38
required-projects:
- openstack/nova
- openstack/oslo.policy
- job:
name: cross-neutron-tox-py38
parent: openstack-tox
description: |
Run cross-project unit tests on neutron.
vars:
zuul_work_dir: src/opendev.org/openstack/neutron
tox_envlist: py38
required-projects:
- openstack/neutron
- openstack/oslo.policy
- project:
templates:
- check-requirements
- lib-forward-testing-python3
- openstack-lower-constraints-jobs
- openstack-python3-wallaby-jobs
- periodic-stable-jobs
- publish-openstack-docs-pti
- release-notes-jobs-python3
check:
jobs:
- cross-nova-tox-py38
- cross-nova-tox-functional
- cross-neutron-tox-py38
gate:
jobs:
- cross-nova-tox-py38
- cross-nova-tox-functional
- cross-neutron-tox-py38
|
54fb750d8389febff3fa254c4dd5d815b7a60f69 | Nimble/Nimble.h | Nimble/Nimble.h |
FOUNDATION_EXPORT double NimbleVersionNumber;
FOUNDATION_EXPORT const unsigned char NimbleVersionString[];
|
FOUNDATION_EXPORT double NimbleVersionNumber;
FOUNDATION_EXPORT const unsigned char NimbleVersionString[];
#import "DSL.h"
#import "NMBExceptionCapture.h"
| Add Objective-C headers to umbrella header to make project compile | Add Objective-C headers to umbrella header to make project compile
| C | apache-2.0 | DanielAsher/Nimble,AnthonyMDev/Nimble,abbeycode/Nimble,AnthonyMDev/Nimble,twobitlabs/Nimble,twobitlabs/Nimble,abbeycode/Nimble,feinstruktur/Nimble,phatblat/Nimble,AnthonyMDev/Nimble,Quick/Nimble,ashfurrow/Nimble,DanielAsher/Nimble,feinstruktur/Nimble,DanielAsher/Nimble,ashfurrow/Nimble,jeffh/Nimble,abbeycode/Nimble,mishimay/Nimble,dgdosen/Nimble,phatblat/Nimble,feinstruktur/Nimble,phatblat/Nimble,dgdosen/Nimble,Quick/Nimble,abbeycode/Nimble,jeffh/Nimble,mishimay/Nimble,dgdosen/Nimble,mishimay/Nimble,twobitlabs/Nimble,ashfurrow/Nimble,jeffh/Nimble,dgdosen/Nimble,Quick/Nimble,jeffh/Nimble | c | ## Code Before:
FOUNDATION_EXPORT double NimbleVersionNumber;
FOUNDATION_EXPORT const unsigned char NimbleVersionString[];
## Instruction:
Add Objective-C headers to umbrella header to make project compile
## Code After:
FOUNDATION_EXPORT double NimbleVersionNumber;
FOUNDATION_EXPORT const unsigned char NimbleVersionString[];
#import "DSL.h"
#import "NMBExceptionCapture.h"
|
033994218bbc2aa0691c6ad0cb4445bc1f68e04e | cla_frontend/assets-src/javascripts/app/partials/call_centre/case_detail.invalid.html | cla_frontend/assets-src/javascripts/app/partials/call_centre/case_detail.invalid.html | <header>
<h2>Incomplete case</h2>
</header>
<div class="Error Error--basic" ng-show="case_errors.length > 0" data-case-errors>
<p>You must correct the following errors before you can assign a provider:</p>
<ul>
<li ng-repeat="error in case_errors">{{ error.message }}</li>
</ul>
</div>
<div class="Notice" ng-show="case_warnings.length > 0" data-case-warnings>
<p>Please make sure you have checked the following warnings before assigning a provider:</p>
<ul>
<li ng-repeat="warning in case_warnings">{{ warning.message }}</li>
</ul>
</div>
<div class="FormActions">
<button type="button" name="assign-provider" class="Button" ng-show="case_errors.length === 0" ng-click="proceed()">Continue</button>
<a href="" ng-show="case.diagnosis_state !== 'INSCOPE' || case.eligibility_state !== 'yes'" ng-click="proceed()">Refer for second opinion</a>
<a href="" ng-click="close()">Cancel</a>
</div>
| <header>
<h2>Incomplete case</h2>
</header>
<div class="Error Error--basic" ng-show="case_errors.length > 0" data-case-errors>
<p>You must correct the following errors before you can assign a provider:</p>
<ul>
<li ng-repeat="error in case_errors">{{ error.message }}</li>
</ul>
</div>
<div class="Notice" ng-show="case_warnings.length > 0" data-case-warnings>
<p>Please make sure you have checked the following warnings before assigning a provider:</p>
<ul>
<li ng-repeat="warning in case_warnings">{{ warning.message }}</li>
</ul>
</div>
<div class="FormActions">
<button type="button" name="assign-provider" class="Button" ng-show="case_errors.length === 0" ng-click="proceed()">Continue</button>
<a href="" ng-click="close()">Cancel</a>
</div>
| Remove old SPOR refer link | Remove old SPOR refer link
| HTML | mit | ministryofjustice/cla_frontend,ministryofjustice/cla_frontend,ministryofjustice/cla_frontend,ministryofjustice/cla_frontend | html | ## Code Before:
<header>
<h2>Incomplete case</h2>
</header>
<div class="Error Error--basic" ng-show="case_errors.length > 0" data-case-errors>
<p>You must correct the following errors before you can assign a provider:</p>
<ul>
<li ng-repeat="error in case_errors">{{ error.message }}</li>
</ul>
</div>
<div class="Notice" ng-show="case_warnings.length > 0" data-case-warnings>
<p>Please make sure you have checked the following warnings before assigning a provider:</p>
<ul>
<li ng-repeat="warning in case_warnings">{{ warning.message }}</li>
</ul>
</div>
<div class="FormActions">
<button type="button" name="assign-provider" class="Button" ng-show="case_errors.length === 0" ng-click="proceed()">Continue</button>
<a href="" ng-show="case.diagnosis_state !== 'INSCOPE' || case.eligibility_state !== 'yes'" ng-click="proceed()">Refer for second opinion</a>
<a href="" ng-click="close()">Cancel</a>
</div>
## Instruction:
Remove old SPOR refer link
## Code After:
<header>
<h2>Incomplete case</h2>
</header>
<div class="Error Error--basic" ng-show="case_errors.length > 0" data-case-errors>
<p>You must correct the following errors before you can assign a provider:</p>
<ul>
<li ng-repeat="error in case_errors">{{ error.message }}</li>
</ul>
</div>
<div class="Notice" ng-show="case_warnings.length > 0" data-case-warnings>
<p>Please make sure you have checked the following warnings before assigning a provider:</p>
<ul>
<li ng-repeat="warning in case_warnings">{{ warning.message }}</li>
</ul>
</div>
<div class="FormActions">
<button type="button" name="assign-provider" class="Button" ng-show="case_errors.length === 0" ng-click="proceed()">Continue</button>
<a href="" ng-click="close()">Cancel</a>
</div>
|
ccdb3bd5c80094bc65a56053aa3ea782e2711b62 | closure/goog/dom/textassert_test.js | closure/goog/dom/textassert_test.js | // Copyright 2017 The Closure Library Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS-IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
goog.module('goog.dom.textassert_test');
goog.setTestOnly();
var testSuite = goog.require('goog.testing.testSuite');
var textAssert = goog.require('goog.dom.textAssert');
var userAgent = goog.require('goog.userAgent');
testSuite({
shouldRunTests() {
return !userAgent.IE || userAgent.isVersionOrHigher(9);
},
testAssertIsTextWillThrow: function() {
assertThrows(() => textAssert.assertHtmlFree('<b>a<\b>'));
},
testAssertIsTextDoesNotChangeText: function() {
var plain = 'text';
assertEquals(plain, textAssert.assertHtmlFree(plain));
},
});
| // Copyright 2017 The Closure Library Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS-IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
goog.module('goog.dom.textassert_test');
goog.setTestOnly();
const testSuite = goog.require('goog.testing.testSuite');
const textAssert = goog.require('goog.dom.textAssert');
const userAgent = goog.require('goog.userAgent');
testSuite({
shouldRunTests() {
return !userAgent.IE || userAgent.isVersionOrHigher(9);
},
testAssertIsTextThrowsWithHtmlTags: function() {
const e = assertThrows(() => textAssert.assertHtmlFree('<b>a<\\b>'));
assertEquals(
'Assertion failed: String has HTML original: ' +
'<b>a<\\b>, escaped: <b>a<\\b>',
e.message);
},
testAssertIsTextThrowsWithHtmlEntities: function() {
const e = assertThrows(() => {
textAssert.assertHtmlFree('a&b');
});
assertEquals(
'Assertion failed: String has HTML original: ' +
'a&b, escaped: a&amp;b',
e.message);
},
testAssertIsTextDoesNotChangeText: function() {
const plain = 'text';
assertEquals(plain, textAssert.assertHtmlFree(plain));
},
});
| Add a test for HTML entities for goog.dom.textassert, and verify the thrown values in those tests. | Add a test for HTML entities for goog.dom.textassert, and verify the thrown values in those tests.
(This was inspired by investigating the assertion code after having missed some HTML entities in cl/186648009. The assertion is working fine.)
RELNOTES: n/a
-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=187388264
| JavaScript | apache-2.0 | lucidsoftware/closure-library,lucidsoftware/closure-library,google/closure-library,teppeis/closure-library,lucidsoftware/closure-library,google/closure-library,teppeis/closure-library,google/closure-library,teppeis/closure-library,google/closure-library,lucidsoftware/closure-library,teppeis/closure-library,google/closure-library | javascript | ## Code Before:
// Copyright 2017 The Closure Library Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS-IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
goog.module('goog.dom.textassert_test');
goog.setTestOnly();
var testSuite = goog.require('goog.testing.testSuite');
var textAssert = goog.require('goog.dom.textAssert');
var userAgent = goog.require('goog.userAgent');
testSuite({
shouldRunTests() {
return !userAgent.IE || userAgent.isVersionOrHigher(9);
},
testAssertIsTextWillThrow: function() {
assertThrows(() => textAssert.assertHtmlFree('<b>a<\b>'));
},
testAssertIsTextDoesNotChangeText: function() {
var plain = 'text';
assertEquals(plain, textAssert.assertHtmlFree(plain));
},
});
## Instruction:
Add a test for HTML entities for goog.dom.textassert, and verify the thrown values in those tests.
(This was inspired by investigating the assertion code after having missed some HTML entities in cl/186648009. The assertion is working fine.)
RELNOTES: n/a
-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=187388264
## Code After:
// Copyright 2017 The Closure Library Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS-IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
goog.module('goog.dom.textassert_test');
goog.setTestOnly();
const testSuite = goog.require('goog.testing.testSuite');
const textAssert = goog.require('goog.dom.textAssert');
const userAgent = goog.require('goog.userAgent');
testSuite({
shouldRunTests() {
return !userAgent.IE || userAgent.isVersionOrHigher(9);
},
testAssertIsTextThrowsWithHtmlTags: function() {
const e = assertThrows(() => textAssert.assertHtmlFree('<b>a<\\b>'));
assertEquals(
'Assertion failed: String has HTML original: ' +
'<b>a<\\b>, escaped: <b>a<\\b>',
e.message);
},
testAssertIsTextThrowsWithHtmlEntities: function() {
const e = assertThrows(() => {
textAssert.assertHtmlFree('a&b');
});
assertEquals(
'Assertion failed: String has HTML original: ' +
'a&b, escaped: a&amp;b',
e.message);
},
testAssertIsTextDoesNotChangeText: function() {
const plain = 'text';
assertEquals(plain, textAssert.assertHtmlFree(plain));
},
});
|
3aeac6189227e4b97a868da33220ff28ab1a965c | .github/workflows/control-slycot-src.yml | .github/workflows/control-slycot-src.yml | name: Slycot from source
on: [push, pull_request]
jobs:
build-linux:
runs-on: ubuntu-latest
steps:
- name: Checkout python-control
uses: actions/checkout@v3
with:
path: python-control
- name: Set up Python
uses: actions/setup-python@v2
- name: Install Python dependencies
run: |
# Set up conda
echo $CONDA/bin >> $GITHUB_PATH
# Set up (virtual) X11
sudo apt install -y xvfb
# Install test tools
conda install pip pytest pytest-timeout
# Install python-control dependencies
conda install numpy matplotlib scipy
- name: Checkout Slycot
uses: actions/checkout@v3
with:
repository: python-control/Slycot
submodules: recursive
fetch-depth: 0
path: slycot
- name: Install slycot from source
env:
BLA_VENDOR: Generic
CMAKE_GENERATOR: Unix Makefiles
working-directory: slycot
run: |
# Install compilers, libraries, and development environment
sudo apt-get -y install gfortran cmake --fix-missing
sudo apt-get -y install libblas-dev liblapack-dev
conda install -c conda-forge scikit-build setuptools-scm
# Compile and install slycot
pip install -v --no-build-isolation --no-deps .
- name: Test with pytest
working-directory: python-control
run: xvfb-run --auto-servernum pytest control/tests
| name: Slycot from source
on: [push, pull_request]
jobs:
build-linux:
runs-on: ubuntu-latest
steps:
- name: Checkout python-control
uses: actions/checkout@v3
with:
path: python-control
- name: Set up Python
uses: actions/setup-python@v2
- name: Install Python dependencies
run: |
# Set up (virtual) X11
sudo apt install -y xvfb
# Install test tools
pip install pytest pytest-timeout
# Install python-control dependencies
pip install numpy matplotlib scipy
- name: Checkout Slycot
uses: actions/checkout@v3
with:
repository: python-control/Slycot
submodules: recursive
fetch-depth: 0
path: slycot
- name: Install slycot from source
env:
BLA_VENDOR: Generic
CMAKE_GENERATOR: Unix Makefiles
working-directory: slycot
run: |
# Install compilers, libraries, and development environment
sudo apt-get -y install gfortran cmake --fix-missing
sudo apt-get -y install libblas-dev liblapack-dev
# Compile and install slycot
pip install -v .
- name: Test with pytest
working-directory: python-control
run: xvfb-run --auto-servernum pytest control/tests
| Build Slycot from source using PyPI wheels | Build Slycot from source using PyPI wheels
| YAML | bsd-3-clause | python-control/python-control | yaml | ## Code Before:
name: Slycot from source
on: [push, pull_request]
jobs:
build-linux:
runs-on: ubuntu-latest
steps:
- name: Checkout python-control
uses: actions/checkout@v3
with:
path: python-control
- name: Set up Python
uses: actions/setup-python@v2
- name: Install Python dependencies
run: |
# Set up conda
echo $CONDA/bin >> $GITHUB_PATH
# Set up (virtual) X11
sudo apt install -y xvfb
# Install test tools
conda install pip pytest pytest-timeout
# Install python-control dependencies
conda install numpy matplotlib scipy
- name: Checkout Slycot
uses: actions/checkout@v3
with:
repository: python-control/Slycot
submodules: recursive
fetch-depth: 0
path: slycot
- name: Install slycot from source
env:
BLA_VENDOR: Generic
CMAKE_GENERATOR: Unix Makefiles
working-directory: slycot
run: |
# Install compilers, libraries, and development environment
sudo apt-get -y install gfortran cmake --fix-missing
sudo apt-get -y install libblas-dev liblapack-dev
conda install -c conda-forge scikit-build setuptools-scm
# Compile and install slycot
pip install -v --no-build-isolation --no-deps .
- name: Test with pytest
working-directory: python-control
run: xvfb-run --auto-servernum pytest control/tests
## Instruction:
Build Slycot from source using PyPI wheels
## Code After:
name: Slycot from source
on: [push, pull_request]
jobs:
build-linux:
runs-on: ubuntu-latest
steps:
- name: Checkout python-control
uses: actions/checkout@v3
with:
path: python-control
- name: Set up Python
uses: actions/setup-python@v2
- name: Install Python dependencies
run: |
# Set up (virtual) X11
sudo apt install -y xvfb
# Install test tools
pip install pytest pytest-timeout
# Install python-control dependencies
pip install numpy matplotlib scipy
- name: Checkout Slycot
uses: actions/checkout@v3
with:
repository: python-control/Slycot
submodules: recursive
fetch-depth: 0
path: slycot
- name: Install slycot from source
env:
BLA_VENDOR: Generic
CMAKE_GENERATOR: Unix Makefiles
working-directory: slycot
run: |
# Install compilers, libraries, and development environment
sudo apt-get -y install gfortran cmake --fix-missing
sudo apt-get -y install libblas-dev liblapack-dev
# Compile and install slycot
pip install -v .
- name: Test with pytest
working-directory: python-control
run: xvfb-run --auto-servernum pytest control/tests
|
aca158817c21b8baeeb64d7290d61c32a79124f9 | tests/test_heat_demand.py | tests/test_heat_demand.py |
import numpy as np
from demandlib.examples import heat_demand_example
def test_heat_example():
"""Test the results of the heat example."""
ann_demands_per_type = {'efh': 25000,
'mfh': 80000,
'ghd': 140000}
demands = heat_demand_example.heat_example(
ann_demands_per_type=ann_demands_per_type,
testmode=True).sum()
for key in ann_demands_per_type:
assert np.isclose(demands[key], ann_demands_per_type[key])
|
import numpy as np
from demandlib.examples import heat_demand_example
def test_heat_example():
"""Test the results of the heat example."""
ann_demands_per_type = {'efh': 25000,
'mfh': 80000,
'ghd': 140000}
demands = heat_demand_example.heat_example(
ann_demands_per_type=ann_demands_per_type,
testmode=True).sum()
for key in ann_demands_per_type:
assert np.isclose(demands[key], ann_demands_per_type[key], rtol=1e-04)
| Increase tollerance for heat demand test | Increase tollerance for heat demand test
| Python | mit | oemof/demandlib | python | ## Code Before:
import numpy as np
from demandlib.examples import heat_demand_example
def test_heat_example():
"""Test the results of the heat example."""
ann_demands_per_type = {'efh': 25000,
'mfh': 80000,
'ghd': 140000}
demands = heat_demand_example.heat_example(
ann_demands_per_type=ann_demands_per_type,
testmode=True).sum()
for key in ann_demands_per_type:
assert np.isclose(demands[key], ann_demands_per_type[key])
## Instruction:
Increase tollerance for heat demand test
## Code After:
import numpy as np
from demandlib.examples import heat_demand_example
def test_heat_example():
"""Test the results of the heat example."""
ann_demands_per_type = {'efh': 25000,
'mfh': 80000,
'ghd': 140000}
demands = heat_demand_example.heat_example(
ann_demands_per_type=ann_demands_per_type,
testmode=True).sum()
for key in ann_demands_per_type:
assert np.isclose(demands[key], ann_demands_per_type[key], rtol=1e-04)
|
7b6fb2d4b594b2baed85b349df110b34a76dff12 | README.md | README.md | The Steam Swiss API Knife
=========================
A tool to explore the Steam API. The goal is to provide a simple
way for exploration and documentation of the API requests. Feel
free to contribute docs at http://wiki.teamfortress.com/wiki/WebAPI
|
A tool to explore the Steam API. The goal is to provide a simple
way for exploration and documentation of the API requests. Feel
free to contribute docs at http://wiki.teamfortress.com/wiki/WebAPI
### Using for API documentation ###
There is a feature in the swiss API knife intended to assist in
creating documentation pages on the [TF wiki](http://wiki.teamfortress.com/wiki/WebAPI).
To generate a skeleton page for the wiki API simply pass `-f wiki` when running the tool.
Also see the output of `-h` for more options for filtering. Refer to the existing API pages
for examples on the structure to use for the JSON output documentation.
| Add instructions for wiki usage | Add instructions for wiki usage
| Markdown | isc | Lagg/steam-swissapiknife | markdown | ## Code Before:
The Steam Swiss API Knife
=========================
A tool to explore the Steam API. The goal is to provide a simple
way for exploration and documentation of the API requests. Feel
free to contribute docs at http://wiki.teamfortress.com/wiki/WebAPI
## Instruction:
Add instructions for wiki usage
## Code After:
A tool to explore the Steam API. The goal is to provide a simple
way for exploration and documentation of the API requests. Feel
free to contribute docs at http://wiki.teamfortress.com/wiki/WebAPI
### Using for API documentation ###
There is a feature in the swiss API knife intended to assist in
creating documentation pages on the [TF wiki](http://wiki.teamfortress.com/wiki/WebAPI).
To generate a skeleton page for the wiki API simply pass `-f wiki` when running the tool.
Also see the output of `-h` for more options for filtering. Refer to the existing API pages
for examples on the structure to use for the JSON output documentation.
|
8e1e8d0f081e715ca6f48abab180376c017a7e4e | tenets/codelingo/effective-go/package-comment/codelingo.yaml | tenets/codelingo/effective-go/package-comment/codelingo.yaml | tenets:
- name: package-comment
flows:
codelingo/docs:
title: Package comment
body: |
Every package should have a package comment, a block comment preceding the package clause.
For multi-file packages, the package comment only needs to be present in one file, and any one will do.
The package comment should introduce the package and provide information relevant to the package as a
whole. It will appear first on the godoc page and should set up the detailed documentation that follows.
codelingo/review:
comment: |
Every package should have a package comment, a block comment preceding the package clause.
For multi-file packages, the package comment only needs to be present in one file, and any one will do.
query: |
import codelingo/ast/go
@review comment
go.file(depth = any):
go.ident:
name as packageName
exclude:
go.comment(depth = any):
start_offset == 0
exclude:
go.file:
go.ident:
name == packageName
go.comment(depth = any):
start_offset == 0
| tenets:
- name: package-comment
flows:
codelingo/docs:
title: Package comment
body: |
Every package should have a package comment, a block comment preceding the package clause.
For multi-file packages, the package comment only needs to be present in one file, and any one will do.
The package comment should introduce the package and provide information relevant to the package as a
whole. It will appear first on the godoc page and should set up the detailed documentation that follows.
codelingo/review:
comment: |
Every package should have a package comment, a block comment preceding the package clause.
For multi-file packages, the package comment only needs to be present in one file, and any one will do.
query: |
import codelingo/ast/go
@review comment
go.file(depth = any):
exclude:
go.comment_group:
go.comment
| Edit tenet to just look for attached package comment in file | Edit tenet to just look for attached package comment in file
| YAML | agpl-3.0 | lingo-reviews/lingo,lingo-reviews/lingo | yaml | ## Code Before:
tenets:
- name: package-comment
flows:
codelingo/docs:
title: Package comment
body: |
Every package should have a package comment, a block comment preceding the package clause.
For multi-file packages, the package comment only needs to be present in one file, and any one will do.
The package comment should introduce the package and provide information relevant to the package as a
whole. It will appear first on the godoc page and should set up the detailed documentation that follows.
codelingo/review:
comment: |
Every package should have a package comment, a block comment preceding the package clause.
For multi-file packages, the package comment only needs to be present in one file, and any one will do.
query: |
import codelingo/ast/go
@review comment
go.file(depth = any):
go.ident:
name as packageName
exclude:
go.comment(depth = any):
start_offset == 0
exclude:
go.file:
go.ident:
name == packageName
go.comment(depth = any):
start_offset == 0
## Instruction:
Edit tenet to just look for attached package comment in file
## Code After:
tenets:
- name: package-comment
flows:
codelingo/docs:
title: Package comment
body: |
Every package should have a package comment, a block comment preceding the package clause.
For multi-file packages, the package comment only needs to be present in one file, and any one will do.
The package comment should introduce the package and provide information relevant to the package as a
whole. It will appear first on the godoc page and should set up the detailed documentation that follows.
codelingo/review:
comment: |
Every package should have a package comment, a block comment preceding the package clause.
For multi-file packages, the package comment only needs to be present in one file, and any one will do.
query: |
import codelingo/ast/go
@review comment
go.file(depth = any):
exclude:
go.comment_group:
go.comment
|
463e1938e893dec5049b8b1a83736e83783246f4 | simple-git/src/lib/plugins/timout-plugin.ts | simple-git/src/lib/plugins/timout-plugin.ts | import { SimpleGitOptions } from '../types';
import { SimpleGitPlugin } from './simple-git-plugin';
import { GitPluginError } from '../errors/git-plugin-error';
export function timeoutPlugin({block}: Exclude<SimpleGitOptions['timeout'], undefined>): SimpleGitPlugin<'spawn.after'> | void {
if (block > 0) {
return {
type: 'spawn.after',
action(_data, context) {
let timeout: NodeJS.Timeout;
function wait() {
timeout && clearTimeout(timeout);
timeout = setTimeout(kill, block);
}
function stop() {
context.spawned.stdout?.off('data', wait);
context.spawned.stderr?.off('data', wait);
context.spawned.off('exit', stop);
context.spawned.off('close', stop);
}
function kill() {
stop()
context.kill(
new GitPluginError(undefined, 'timeout', `block timeout reached`)
);
}
context.spawned.stdout?.on('data', wait);
context.spawned.stderr?.on('data', wait);
context.spawned.on('exit', stop);
context.spawned.on('close', stop);
wait();
}
}
}
}
| import { SimpleGitOptions } from '../types';
import { SimpleGitPlugin } from './simple-git-plugin';
import { GitPluginError } from '../errors/git-plugin-error';
export function timeoutPlugin({block}: Exclude<SimpleGitOptions['timeout'], undefined>): SimpleGitPlugin<'spawn.after'> | void {
if (block > 0) {
return {
type: 'spawn.after',
action(_data, context) {
let timeout: NodeJS.Timeout;
function wait() {
timeout && clearTimeout(timeout);
timeout = setTimeout(kill, block);
}
function stop() {
context.spawned.stdout?.off('data', wait);
context.spawned.stderr?.off('data', wait);
context.spawned.off('exit', stop);
context.spawned.off('close', stop);
timeout && clearTimeout(timeout);
}
function kill() {
stop()
context.kill(
new GitPluginError(undefined, 'timeout', `block timeout reached`)
);
}
context.spawned.stdout?.on('data', wait);
context.spawned.stderr?.on('data', wait);
context.spawned.on('exit', stop);
context.spawned.on('close', stop);
wait();
}
}
}
}
| Clear timeout upon stop to avoid hanging the process | Clear timeout upon stop to avoid hanging the process
| TypeScript | mit | steveukx/git-js,steveukx/git-js | typescript | ## Code Before:
import { SimpleGitOptions } from '../types';
import { SimpleGitPlugin } from './simple-git-plugin';
import { GitPluginError } from '../errors/git-plugin-error';
export function timeoutPlugin({block}: Exclude<SimpleGitOptions['timeout'], undefined>): SimpleGitPlugin<'spawn.after'> | void {
if (block > 0) {
return {
type: 'spawn.after',
action(_data, context) {
let timeout: NodeJS.Timeout;
function wait() {
timeout && clearTimeout(timeout);
timeout = setTimeout(kill, block);
}
function stop() {
context.spawned.stdout?.off('data', wait);
context.spawned.stderr?.off('data', wait);
context.spawned.off('exit', stop);
context.spawned.off('close', stop);
}
function kill() {
stop()
context.kill(
new GitPluginError(undefined, 'timeout', `block timeout reached`)
);
}
context.spawned.stdout?.on('data', wait);
context.spawned.stderr?.on('data', wait);
context.spawned.on('exit', stop);
context.spawned.on('close', stop);
wait();
}
}
}
}
## Instruction:
Clear timeout upon stop to avoid hanging the process
## Code After:
import { SimpleGitOptions } from '../types';
import { SimpleGitPlugin } from './simple-git-plugin';
import { GitPluginError } from '../errors/git-plugin-error';
export function timeoutPlugin({block}: Exclude<SimpleGitOptions['timeout'], undefined>): SimpleGitPlugin<'spawn.after'> | void {
if (block > 0) {
return {
type: 'spawn.after',
action(_data, context) {
let timeout: NodeJS.Timeout;
function wait() {
timeout && clearTimeout(timeout);
timeout = setTimeout(kill, block);
}
function stop() {
context.spawned.stdout?.off('data', wait);
context.spawned.stderr?.off('data', wait);
context.spawned.off('exit', stop);
context.spawned.off('close', stop);
timeout && clearTimeout(timeout);
}
function kill() {
stop()
context.kill(
new GitPluginError(undefined, 'timeout', `block timeout reached`)
);
}
context.spawned.stdout?.on('data', wait);
context.spawned.stderr?.on('data', wait);
context.spawned.on('exit', stop);
context.spawned.on('close', stop);
wait();
}
}
}
}
|
a3b50a4f17fe21ed2c90ccafe1f1a088780de9a6 | src/core/stores/ApiStore.js | src/core/stores/ApiStore.js | var Reflux = require('reflux');
var config = require('./../../../config');
var ApiActions = require('./../actions/ApiActions');
var buffer = [];
var ws;
var ApiStore = Reflux.createStore({
init: function () {
ws = new WebSocket('ws://' + config.host + ':' + config.port);
ws.onmessage = function (event) {
console.log(JSON.parse(event.data));
ApiStore.trigger(JSON.parse(event.data));
};
ws.onopen = function () {
buffer.forEach(function (request) {
ws.send(JSON.stringify(request));
});
};
this.listenTo(ApiActions.get, this.get);
},
get: function (id, params) {
if (ws.readyState !== WebSocket.OPEN) {
buffer.push({
id: id,
params: params || {}
});
return;
}
ws.send(JSON.stringify({
id: id,
params: params || {}
}));
}
});
module.exports = ApiStore; | var Reflux = require('reflux');
var config = require('./../../../config');
var ApiActions = require('./../actions/ApiActions');
var buffer = [];
var ws;
var ApiStore = Reflux.createStore({
init: function () {
ws = new WebSocket('ws://' + window.document.location.host);
ws.onmessage = function (event) {
console.log(JSON.parse(event.data));
ApiStore.trigger(JSON.parse(event.data));
};
ws.onopen = function () {
buffer.forEach(function (request) {
ws.send(JSON.stringify(request));
});
};
this.listenTo(ApiActions.get, this.get);
},
get: function (id, params) {
if (ws.readyState !== WebSocket.OPEN) {
buffer.push({
id: id,
params: params || {}
});
return;
}
ws.send(JSON.stringify({
id: id,
params: params || {}
}));
}
});
module.exports = ApiStore; | Fix problem with ws host:port mismatch on heroku | Fix problem with ws host:port mismatch on heroku
| JavaScript | mit | plouc/mozaik,backjo/mozaikdummyfork,danielw92/mozaik,beni55/mozaik,codeaudit/mozaik,tlenclos/mozaik,juhamust/mozaik,michaelchiche/mozaik,danielw92/mozaik,plouc/mozaik,juhamust/mozaik,beni55/mozaik,backjo/mozaikdummyfork,michaelchiche/mozaik,tlenclos/mozaik,codeaudit/mozaik | javascript | ## Code Before:
var Reflux = require('reflux');
var config = require('./../../../config');
var ApiActions = require('./../actions/ApiActions');
var buffer = [];
var ws;
var ApiStore = Reflux.createStore({
init: function () {
ws = new WebSocket('ws://' + config.host + ':' + config.port);
ws.onmessage = function (event) {
console.log(JSON.parse(event.data));
ApiStore.trigger(JSON.parse(event.data));
};
ws.onopen = function () {
buffer.forEach(function (request) {
ws.send(JSON.stringify(request));
});
};
this.listenTo(ApiActions.get, this.get);
},
get: function (id, params) {
if (ws.readyState !== WebSocket.OPEN) {
buffer.push({
id: id,
params: params || {}
});
return;
}
ws.send(JSON.stringify({
id: id,
params: params || {}
}));
}
});
module.exports = ApiStore;
## Instruction:
Fix problem with ws host:port mismatch on heroku
## Code After:
var Reflux = require('reflux');
var config = require('./../../../config');
var ApiActions = require('./../actions/ApiActions');
var buffer = [];
var ws;
var ApiStore = Reflux.createStore({
init: function () {
ws = new WebSocket('ws://' + window.document.location.host);
ws.onmessage = function (event) {
console.log(JSON.parse(event.data));
ApiStore.trigger(JSON.parse(event.data));
};
ws.onopen = function () {
buffer.forEach(function (request) {
ws.send(JSON.stringify(request));
});
};
this.listenTo(ApiActions.get, this.get);
},
get: function (id, params) {
if (ws.readyState !== WebSocket.OPEN) {
buffer.push({
id: id,
params: params || {}
});
return;
}
ws.send(JSON.stringify({
id: id,
params: params || {}
}));
}
});
module.exports = ApiStore; |
35d74c18380fe1955f7f6a495aef261a98f6be32 | toolchains/syno-alpine-5.1/Makefile | toolchains/syno-alpine-5.1/Makefile | TC_NAME = syno-$(TC_ARCH)
TC_ARCH = alpine
TC_VERS = 5.1
TC_FIRMWARE = 5.1-5004
TC_DIST_NAME = gcc464_glibc217_alpine-GPL.tgz
TC_EXT = tgz
TC_DIST_SITE = http://sourceforge.net/projects/dsgpl/files/DSM%205.1%20Tool%20Chains/Annapurna%20Alpine%20Linux%203.2.40
TC_BASE_DIR = arm-cortex_a15-linux-gnueabi
TC_PREFIX = arm-cortex_a15-linux-gnueabi
TC_TARGET = arm-cortex_a15-linux-gnueabi
TC_CFLAGS =
TC_CPPFLAGS =
TC_CXXFLAGS =
TC_LDFLAGS =
include ../../mk/spksrc.tc.mk
| TC_NAME = syno-$(TC_ARCH)
TC_ARCH = alpine
TC_VERS = 5.1
TC_FIRMWARE = 5.1-5004
TC_DIST_NAME = gcc464_glibc217_alpine-GPL.tgz
TC_EXT = tgz
TC_DIST_SITE = http://sourceforge.net/projects/dsgpl/files/DSM%205.1%20Tool%20Chains/Annapurna%20Alpine%20Linux%203.2.40
TC_BASE_DIR = arm-cortex_a15-linux-gnueabi
TC_PREFIX = arm-cortex_a15-linux-gnueabi
TC_TARGET = arm-cortex_a15-linux-gnueabi
TC_CFLAGS =
TC_CPPFLAGS =
TC_CXXFLAGS =
TC_LDFLAGS =
FIX_TARGET = myFix
include ../../mk/spksrc.tc.mk
.PHONY: myFix
myFix:
chmod -R u+w $(WORK_DIR)
@find $(WORK_DIR)/$(TC_BASE_DIR) -type f -name '*.la' -exec sed -i -e "s|^libdir=.*$$|libdir='$(WORK_DIR)/$(TC_BASE_DIR)/$(TC_BASE_DIR)/lib'|" {} \;
| Fix compile issues with Alpine toolchain | Fix compile issues with Alpine toolchain
| unknown | bsd-3-clause | Grimthorr/spksrc,demorfi/spksrc,Dr-Bean/spksrc,lysin/spksrc,astroganga/spksrc,Grimthorr/spksrc,sea3pea0/spksrc,markbastiaans/spksrc,hmflash/spksrc,Pyrex-FWI/spksrc,mirweb/spksrc,jdierkse/spksrc,sea3pea0/spksrc,sea3pea0/spksrc,GaetanCambier/spksrc,hmflash/spksrc,Grimthorr/spksrc,riverful/spksrc,Foncekar/spksrc,nickbroon/spksrc,thunfischbrot/spksrc,Pyrex-FWI/spksrc,mreppen/spksrc,lost-carrier/spksrc,nickbroon/spksrc,GoodOmens83/spksrc,riverful/spksrc,mirweb/spksrc,bwynants/spksrc,andyblac/spksrc,hmflash/spksrc,Foncekar/spksrc,saschpe/spksrc,nickbroon/spksrc,jdierkse/spksrc,xtech9/spksrc,lost-carrier/spksrc,cdcabrera/spksrc,GoodOmens83/spksrc,GaetanCambier/spksrc,JasOXIII/spksrc,mreppen/spksrc,sangood/spksrc,mreppen/spksrc,Dr-Bean/spksrc,sea3pea0/spksrc,sea3pea0/spksrc,Pyrex-FWI/spksrc,xtech9/spksrc,Dr-Bean/spksrc,adrien-delhorme/spksrc,lost-carrier/spksrc,Dr-Bean/spksrc,jdierkse/spksrc,Foncekar/spksrc,sangood/spksrc,Foncekar/spksrc,nickbroon/spksrc,schumi2004/spksrc,GaetanCambier/spksrc,Pyrex-FWI/spksrc,Foncekar/spksrc,markbastiaans/spksrc,JasOXIII/spksrc,schumi2004/spksrc,GaetanCambier/spksrc,sea3pea0/spksrc,andyblac/spksrc,nickbroon/spksrc,adrien-delhorme/spksrc,markbastiaans/spksrc,GaetanCambier/spksrc,phoenix741/spksrc,Decipher/spksrc,bwynants/spksrc,riverful/spksrc,sangood/spksrc,Dr-Bean/spksrc,thunfischbrot/spksrc,sangood/spksrc,schumi2004/spksrc,Decipher/spksrc,xtech9/spksrc,lost-carrier/spksrc,schumi2004/spksrc,bwynants/spksrc,mirweb/spksrc,hmflash/spksrc,cdcabrera/spksrc,adrien-delhorme/spksrc,saschpe/spksrc,riverful/spksrc,GaetanCambier/spksrc,adrien-delhorme/spksrc,demorfi/spksrc,sangood/spksrc,JasOXIII/spksrc,Foncekar/spksrc,Foncekar/spksrc,GaetanCambier/spksrc,markbastiaans/spksrc,thunfischbrot/spksrc,lysin/spksrc,andyblac/spksrc,Pyrex-FWI/spksrc,Dr-Bean/spksrc,mreppen/spksrc,cdcabrera/spksrc,jdierkse/spksrc,JasOXIII/spksrc,Pyrex-FWI/spksrc,demorfi/spksrc,JasOXIII/spksrc,hmflash/spksrc,sea3pea0/spksrc,astroganga/spksrc,adrien-de
lhorme/spksrc,Pyrex-FWI/spksrc,nickbroon/spksrc,hmflash/spksrc,riverful/spksrc,schumi2004/spksrc,Grimthorr/spksrc,markbastiaans/spksrc,thunfischbrot/spksrc,mirweb/spksrc,jdierkse/spksrc,andyblac/spksrc,lost-carrier/spksrc,xtech9/spksrc,jdierkse/spksrc,GoodOmens83/spksrc,bwynants/spksrc,jdierkse/spksrc,saschpe/spksrc,cdcabrera/spksrc,saschpe/spksrc,bwynants/spksrc,schumi2004/spksrc,xtech9/spksrc,sea3pea0/spksrc,nickbroon/spksrc,demorfi/spksrc,lost-carrier/spksrc,schumi2004/spksrc,markbastiaans/spksrc,Decipher/spksrc,Grimthorr/spksrc,xtech9/spksrc,astroganga/spksrc,phoenix741/spksrc,GaetanCambier/spksrc,schumi2004/spksrc,saschpe/spksrc,GoodOmens83/spksrc,hmflash/spksrc,markbastiaans/spksrc,jdierkse/spksrc,demorfi/spksrc,andyblac/spksrc,hmflash/spksrc,JasOXIII/spksrc,phoenix741/spksrc,JasOXIII/spksrc,astroganga/spksrc,astroganga/spksrc,adrien-delhorme/spksrc,astroganga/spksrc,nickbroon/spksrc,Grimthorr/spksrc,GoodOmens83/spksrc,Dr-Bean/spksrc,GoodOmens83/spksrc,xtech9/spksrc,adrien-delhorme/spksrc,thunfischbrot/spksrc,phoenix741/spksrc,saschpe/spksrc,thunfischbrot/spksrc,GoodOmens83/spksrc,andyblac/spksrc,markbastiaans/spksrc,thunfischbrot/spksrc,Decipher/spksrc,phoenix741/spksrc,mreppen/spksrc,JasOXIII/spksrc,lysin/spksrc,Decipher/spksrc,Dr-Bean/spksrc,riverful/spksrc,riverful/spksrc,phoenix741/spksrc,cdcabrera/spksrc,riverful/spksrc,lost-carrier/spksrc,bwynants/spksrc,mirweb/spksrc,markbastiaans/spksrc,sea3pea0/spksrc,sangood/spksrc,mirweb/spksrc,andyblac/spksrc,xtech9/spksrc,lost-carrier/spksrc,saschpe/spksrc,demorfi/spksrc,demorfi/spksrc,thunfischbrot/spksrc,phoenix741/spksrc,Pyrex-FWI/spksrc,Decipher/spksrc,sangood/spksrc,mreppen/spksrc,xtech9/spksrc,mirweb/spksrc,phoenix741/spksrc,Decipher/spksrc,adrien-delhorme/spksrc,Grimthorr/spksrc,Foncekar/spksrc,hmflash/spksrc,JasOXIII/spksrc,lysin/spksrc,astroganga/spksrc,sangood/spksrc,GoodOmens83/spksrc,GoodOmens83/spksrc,Pyrex-FWI/spksrc,astroganga/spksrc,cdcabrera/spksrc,andyblac/spksrc,GaetanCambier/spksrc,Decipher/sp
ksrc,demorfi/spksrc,schumi2004/spksrc,sangood/spksrc,bwynants/spksrc,adrien-delhorme/spksrc,thunfischbrot/spksrc,astroganga/spksrc,bwynants/spksrc,cdcabrera/spksrc,demorfi/spksrc,cdcabrera/spksrc,lysin/spksrc,Foncekar/spksrc,cdcabrera/spksrc,lysin/spksrc,mirweb/spksrc,mreppen/spksrc,Grimthorr/spksrc,lost-carrier/spksrc,lysin/spksrc,saschpe/spksrc,lysin/spksrc,mirweb/spksrc,mreppen/spksrc,phoenix741/spksrc,riverful/spksrc,Decipher/spksrc,jdierkse/spksrc,Grimthorr/spksrc,andyblac/spksrc,lysin/spksrc,nickbroon/spksrc,mreppen/spksrc,bwynants/spksrc,mirweb/spksrc,saschpe/spksrc | unknown | ## Code Before:
TC_NAME = syno-$(TC_ARCH)
TC_ARCH = alpine
TC_VERS = 5.1
TC_FIRMWARE = 5.1-5004
TC_DIST_NAME = gcc464_glibc217_alpine-GPL.tgz
TC_EXT = tgz
TC_DIST_SITE = http://sourceforge.net/projects/dsgpl/files/DSM%205.1%20Tool%20Chains/Annapurna%20Alpine%20Linux%203.2.40
TC_BASE_DIR = arm-cortex_a15-linux-gnueabi
TC_PREFIX = arm-cortex_a15-linux-gnueabi
TC_TARGET = arm-cortex_a15-linux-gnueabi
TC_CFLAGS =
TC_CPPFLAGS =
TC_CXXFLAGS =
TC_LDFLAGS =
include ../../mk/spksrc.tc.mk
## Instruction:
Fix compile issues with Alpine toolchain
## Code After:
TC_NAME = syno-$(TC_ARCH)
TC_ARCH = alpine
TC_VERS = 5.1
TC_FIRMWARE = 5.1-5004
TC_DIST_NAME = gcc464_glibc217_alpine-GPL.tgz
TC_EXT = tgz
TC_DIST_SITE = http://sourceforge.net/projects/dsgpl/files/DSM%205.1%20Tool%20Chains/Annapurna%20Alpine%20Linux%203.2.40
TC_BASE_DIR = arm-cortex_a15-linux-gnueabi
TC_PREFIX = arm-cortex_a15-linux-gnueabi
TC_TARGET = arm-cortex_a15-linux-gnueabi
TC_CFLAGS =
TC_CPPFLAGS =
TC_CXXFLAGS =
TC_LDFLAGS =
FIX_TARGET = myFix
include ../../mk/spksrc.tc.mk
.PHONY: myFix
myFix:
chmod -R u+w $(WORK_DIR)
@find $(WORK_DIR)/$(TC_BASE_DIR) -type f -name '*.la' -exec sed -i -e "s|^libdir=.*$$|libdir='$(WORK_DIR)/$(TC_BASE_DIR)/$(TC_BASE_DIR)/lib'|" {} \;
|
3e474eac6046a8d081953790a4bc7ce1e963dbc9 | organize-photos.sh | organize-photos.sh |
find unprocessed -type f -print0 | xargs -0 -I {} exiftool -P \
-d '%Y/%m/%Y%m%d_%H%M%S' \
'-filename<organized/${FileModifyDate}_${SubSecTimeOriginal;}%-c.%le' \
'-filename<organized/${GPSDateTime}_${SubSecTimeOriginal;}%-c.%le' \
'-filename<organized/${MediaCreateDate}_${SubSecTimeOriginal;}%-c.%le' \
'-filename<organized/${ModifyDate}_${SubSecTimeOriginal;}%-c.%le' \
'-filename<organized/${DateTimeOriginal}_${SubSecTimeOriginal;}%-c.%le' \
'-filename<organized/${FileModifyDate}_000%-c.%le' \
'-filename<organized/${GPSDateTime}_000%-c.%le' \
'-filename<organized/${MediaCreateDate}_000%-c.%le' \
'-filename<organized/${ModifyDate}_000%-c.%le' \
'-filename<organized/${DateTimeOriginal}_000%-c.%le' \
{}
|
find Unprocessed -type f -print0 | xargs -0 -I {} exiftool -P \
-d '%Y/%m/%Y%m%d_%H%M%S' \
'-filename<Organized/${FileModifyDate}_${SubSecTimeOriginal;}%-c.%le' \
'-filename<Organized/${GPSDateTime}_${SubSecTimeOriginal;}%-c.%le' \
'-filename<Organized/${MediaCreateDate}_${SubSecTimeOriginal;}%-c.%le' \
'-filename<Organized/${ModifyDate}_${SubSecTimeOriginal;}%-c.%le' \
'-filename<Organized/${DateTimeOriginal}_${SubSecTimeOriginal;}%-c.%le' \
'-filename<Organized/${FileModifyDate}_000%-c.%le' \
'-filename<Organized/${GPSDateTime}_000%-c.%le' \
'-filename<Organized/${MediaCreateDate}_000%-c.%le' \
'-filename<Organized/${ModifyDate}_000%-c.%le' \
'-filename<Organized/${DateTimeOriginal}_000%-c.%le' \
{}
| Rename the src and dest directories | Rename the src and dest directories
| Shell | mit | lightster/random-docs | shell | ## Code Before:
find unprocessed -type f -print0 | xargs -0 -I {} exiftool -P \
-d '%Y/%m/%Y%m%d_%H%M%S' \
'-filename<organized/${FileModifyDate}_${SubSecTimeOriginal;}%-c.%le' \
'-filename<organized/${GPSDateTime}_${SubSecTimeOriginal;}%-c.%le' \
'-filename<organized/${MediaCreateDate}_${SubSecTimeOriginal;}%-c.%le' \
'-filename<organized/${ModifyDate}_${SubSecTimeOriginal;}%-c.%le' \
'-filename<organized/${DateTimeOriginal}_${SubSecTimeOriginal;}%-c.%le' \
'-filename<organized/${FileModifyDate}_000%-c.%le' \
'-filename<organized/${GPSDateTime}_000%-c.%le' \
'-filename<organized/${MediaCreateDate}_000%-c.%le' \
'-filename<organized/${ModifyDate}_000%-c.%le' \
'-filename<organized/${DateTimeOriginal}_000%-c.%le' \
{}
## Instruction:
Rename the src and dest directories
## Code After:
find Unprocessed -type f -print0 | xargs -0 -I {} exiftool -P \
-d '%Y/%m/%Y%m%d_%H%M%S' \
'-filename<Organized/${FileModifyDate}_${SubSecTimeOriginal;}%-c.%le' \
'-filename<Organized/${GPSDateTime}_${SubSecTimeOriginal;}%-c.%le' \
'-filename<Organized/${MediaCreateDate}_${SubSecTimeOriginal;}%-c.%le' \
'-filename<Organized/${ModifyDate}_${SubSecTimeOriginal;}%-c.%le' \
'-filename<Organized/${DateTimeOriginal}_${SubSecTimeOriginal;}%-c.%le' \
'-filename<Organized/${FileModifyDate}_000%-c.%le' \
'-filename<Organized/${GPSDateTime}_000%-c.%le' \
'-filename<Organized/${MediaCreateDate}_000%-c.%le' \
'-filename<Organized/${ModifyDate}_000%-c.%le' \
'-filename<Organized/${DateTimeOriginal}_000%-c.%le' \
{}
|
94b259ef029664b556fc6882a48e6f427b3864fd | .travis.yml | .travis.yml | addons:
apt:
packages:
- libgmp-dev
after_success:
- cd _hakyll/site
- echo haskellweekly.news > CNAME
- git init
- git add .
- git config --global user.email 'taylor@fausak.me'
- git config --global user.name 'Taylor Fausak'
- git commit --author 'Travis CI <builds@travis-ci.org>' --message "Automatic deploy of $TRAVIS_COMMIT"
- git remote add origin "https://$GITHUB_TOKEN@github.com/haskellweekly/haskellweekly.github.io.git"
- git push --force --quiet origin master
before_install:
- mkdir -p ~/.local/bin
- export PATH="$HOME/.local/bin:$PATH"
- travis_retry curl -L https://www.stackage.org/stack/linux-x86_64 | tar xz --wildcards --strip-components=1 -C ~/.local/bin '*/stack'
cache:
directories:
- $HOME/.stack
install:
- stack setup
- stack build --only-dependencies
language: c
script:
- stack build
- stack exec haskell-weekly rebuild
sudo: false
| addons:
apt:
packages:
- libgmp-dev
after_success:
- cd _hakyll/site
- echo haskellweekly.news > CNAME
- git init
- git add .
- git config --global user.email 'taylor@fausak.me'
- git config --global user.name 'Taylor Fausak'
- git commit --author 'Haskell Weekly <info@haskellweekly.news>' --message "Automatic deploy of $TRAVIS_COMMIT"
- git remote add origin "https://$GITHUB_TOKEN@github.com/haskellweekly/haskellweekly.github.io.git"
- git push --force --quiet origin master
before_install:
- mkdir -p ~/.local/bin
- export PATH="$HOME/.local/bin:$PATH"
- travis_retry curl -L https://www.stackage.org/stack/linux-x86_64 | tar xz --wildcards --strip-components=1 -C ~/.local/bin '*/stack'
cache:
directories:
- $HOME/.stack
install:
- stack setup
- stack build --only-dependencies
language: c
script:
- stack build
- stack exec haskell-weekly rebuild
sudo: false
| Change automatic deploy commit author | Change automatic deploy commit author
| YAML | mit | haskellweekly/haskellweekly.github.io,haskellweekly/haskellweekly.github.io | yaml | ## Code Before:
addons:
apt:
packages:
- libgmp-dev
after_success:
- cd _hakyll/site
- echo haskellweekly.news > CNAME
- git init
- git add .
- git config --global user.email 'taylor@fausak.me'
- git config --global user.name 'Taylor Fausak'
- git commit --author 'Travis CI <builds@travis-ci.org>' --message "Automatic deploy of $TRAVIS_COMMIT"
- git remote add origin "https://$GITHUB_TOKEN@github.com/haskellweekly/haskellweekly.github.io.git"
- git push --force --quiet origin master
before_install:
- mkdir -p ~/.local/bin
- export PATH="$HOME/.local/bin:$PATH"
- travis_retry curl -L https://www.stackage.org/stack/linux-x86_64 | tar xz --wildcards --strip-components=1 -C ~/.local/bin '*/stack'
cache:
directories:
- $HOME/.stack
install:
- stack setup
- stack build --only-dependencies
language: c
script:
- stack build
- stack exec haskell-weekly rebuild
sudo: false
## Instruction:
Change automatic deploy commit author
## Code After:
addons:
apt:
packages:
- libgmp-dev
after_success:
- cd _hakyll/site
- echo haskellweekly.news > CNAME
- git init
- git add .
- git config --global user.email 'taylor@fausak.me'
- git config --global user.name 'Taylor Fausak'
- git commit --author 'Haskell Weekly <info@haskellweekly.news>' --message "Automatic deploy of $TRAVIS_COMMIT"
- git remote add origin "https://$GITHUB_TOKEN@github.com/haskellweekly/haskellweekly.github.io.git"
- git push --force --quiet origin master
before_install:
- mkdir -p ~/.local/bin
- export PATH="$HOME/.local/bin:$PATH"
- travis_retry curl -L https://www.stackage.org/stack/linux-x86_64 | tar xz --wildcards --strip-components=1 -C ~/.local/bin '*/stack'
cache:
directories:
- $HOME/.stack
install:
- stack setup
- stack build --only-dependencies
language: c
script:
- stack build
- stack exec haskell-weekly rebuild
sudo: false
|
e5eb0db5a560368f2daa4e197e3cb9bfc0cf04cb | core/src/main/scala/me/shadaj/slinky/core/ExternalComponent.scala | core/src/main/scala/me/shadaj/slinky/core/ExternalComponent.scala | package me.shadaj.slinky.core
import me.shadaj.slinky.core.facade.{ComponentInstance, React}
import scala.language.implicitConversions
import scala.scalajs.js
class BuildingComponent[Props](e: ExternalComponent, props: Props, key: String, ref: js.Object => Unit, writer: Writer[Props]) {
def apply(children: ComponentInstance*): ComponentInstance = {
val written = writer.write(props)
if (key != null) {
written.asInstanceOf[js.Dynamic].updateDynamic("key")(key)
}
if (ref != null) {
written.asInstanceOf[js.Dynamic].updateDynamic("ref")(ref: js.Function1[js.Object, Unit])
}
React.createElement(e.component, written, children: _*)
}
}
object BuildingComponent {
implicit def shortcut(bc: BuildingComponent[_]): ComponentInstance = {
bc()
}
}
trait ExternalComponent {
type Props
val component: js.Object
def apply(p: Props, key: String = null, ref: js.Object => Unit = null)(implicit writer: Writer[Props]): BuildingComponent[Props] = {
new BuildingComponent[Props](this, p, key, ref, writer)
}
}
| package me.shadaj.slinky.core
import me.shadaj.slinky.core.facade.{ComponentInstance, React}
import scala.language.implicitConversions
import scala.language.experimental.macros
import scala.reflect.macros.blackbox.Context
import scala.scalajs.js
class BuildingComponent[P](e: ExternalComponent, props: P, key: String, ref: js.Object => Unit) {
def apply(children: ComponentInstance*)(implicit writer: Writer[P]): ComponentInstance = {
val written = writer.write(props)
if (key != null) {
written.asInstanceOf[js.Dynamic].updateDynamic("key")(key)
}
if (ref != null) {
written.asInstanceOf[js.Dynamic].updateDynamic("ref")(ref: js.Function1[js.Object, Unit])
}
React.createElement(e.component, written, children: _*)
}
}
object BuildingComponent {
implicit def make[P]: BuildingComponent[P] => ComponentInstance = macro BuildingComponentMacros.makeImpl[P]
}
object BuildingComponentMacros {
// SUPER SKETCHY INTELLIJ HACK
def makeImpl[P: c.WeakTypeTag](c: Context): c.Expr[BuildingComponent[P] => ComponentInstance] = {
import c.universe._
val propsType = implicitly[c.WeakTypeTag[P]].tpe
c.Expr[BuildingComponent[P] => ComponentInstance](
q"(bc: BuildingComponent[$propsType]) => bc.apply()(_root_.scala.Predef.implicitly[_root_.me.shadaj.slinky.core.Writer[$propsType]])"
)
}
}
abstract class ExternalComponent {
type Props
val component: js.Object
def apply(p: Props, key: String = null, ref: js.Object => Unit = null): BuildingComponent[Props] = {
new BuildingComponent(this, p, key, ref)
}
}
| Add macro hack to make no-children compile in IntelliJ and SBT | Add macro hack to make no-children compile in IntelliJ and SBT
| Scala | mit | shadaj/slinky | scala | ## Code Before:
package me.shadaj.slinky.core
import me.shadaj.slinky.core.facade.{ComponentInstance, React}
import scala.language.implicitConversions
import scala.scalajs.js
class BuildingComponent[Props](e: ExternalComponent, props: Props, key: String, ref: js.Object => Unit, writer: Writer[Props]) {
def apply(children: ComponentInstance*): ComponentInstance = {
val written = writer.write(props)
if (key != null) {
written.asInstanceOf[js.Dynamic].updateDynamic("key")(key)
}
if (ref != null) {
written.asInstanceOf[js.Dynamic].updateDynamic("ref")(ref: js.Function1[js.Object, Unit])
}
React.createElement(e.component, written, children: _*)
}
}
object BuildingComponent {
implicit def shortcut(bc: BuildingComponent[_]): ComponentInstance = {
bc()
}
}
trait ExternalComponent {
type Props
val component: js.Object
def apply(p: Props, key: String = null, ref: js.Object => Unit = null)(implicit writer: Writer[Props]): BuildingComponent[Props] = {
new BuildingComponent[Props](this, p, key, ref, writer)
}
}
## Instruction:
Add macro hack to make no-children compile in IntelliJ and SBT
## Code After:
package me.shadaj.slinky.core
import me.shadaj.slinky.core.facade.{ComponentInstance, React}
import scala.language.implicitConversions
import scala.language.experimental.macros
import scala.reflect.macros.blackbox.Context
import scala.scalajs.js
class BuildingComponent[P](e: ExternalComponent, props: P, key: String, ref: js.Object => Unit) {
def apply(children: ComponentInstance*)(implicit writer: Writer[P]): ComponentInstance = {
val written = writer.write(props)
if (key != null) {
written.asInstanceOf[js.Dynamic].updateDynamic("key")(key)
}
if (ref != null) {
written.asInstanceOf[js.Dynamic].updateDynamic("ref")(ref: js.Function1[js.Object, Unit])
}
React.createElement(e.component, written, children: _*)
}
}
object BuildingComponent {
implicit def make[P]: BuildingComponent[P] => ComponentInstance = macro BuildingComponentMacros.makeImpl[P]
}
object BuildingComponentMacros {
// SUPER SKETCHY INTELLIJ HACK
def makeImpl[P: c.WeakTypeTag](c: Context): c.Expr[BuildingComponent[P] => ComponentInstance] = {
import c.universe._
val propsType = implicitly[c.WeakTypeTag[P]].tpe
c.Expr[BuildingComponent[P] => ComponentInstance](
q"(bc: BuildingComponent[$propsType]) => bc.apply()(_root_.scala.Predef.implicitly[_root_.me.shadaj.slinky.core.Writer[$propsType]])"
)
}
}
abstract class ExternalComponent {
type Props
val component: js.Object
def apply(p: Props, key: String = null, ref: js.Object => Unit = null): BuildingComponent[Props] = {
new BuildingComponent(this, p, key, ref)
}
}
|
8b4b749eda3adce84db655abcc554f6c7ca38d4d | salt/roots/dotfiles/init.sls | salt/roots/dotfiles/init.sls | dotfiles-directory:
file.directory:
- user: wicksy
- group: wicksy
- mode: 755
- makedirs : True
- require:
- sls: users
- names:
- /home/wicksy/git
- /home/wicksy/git/configfiles
dotgiles-github:
git.latest:
- name: https://github.com/wicksy/configfiles.git
- branch: master
- user: wicksy
- target: /home/wicksy/git/configfiles
- require:
- file: dotfiles-directory
{%- for user in ['root','wicksy'] %}
{%- for file in ['.vimrc', '.vim', '.bashrc'] %}
dotfiles-{{ user }}-{{ file }}-link:
file.symlink:
{%- if user == "root" %}
- name: /{{ user }}/{{ file }}
{%- else %}
- name: /home/{{ user }}/{{ file }}
{%- endif %}
- target: /home/wicksy/git/configfiles/dotfiles/{{ file }}
- force: True
- user: wicksy
- group: wicksy
- mode: 644
{% endfor %}
{% endfor %}
| dotfiles-directory:
file.directory:
- user: root
- group: root
- mode: 755
- makedirs: True
- recurse:
- user
- group
- names:
- /git
- /git/wicksy
dotgiles-github:
git.latest:
- name: git@github.com:wicksy/configfiles.git
- branch: master
- user: root
- target: /git/wicksy/configfiles
- require:
- file: dotfiles-directory
{%- for user in ['root','wicksy'] %}
{%- for file in ['.vimrc', '.vim', '.bashrc'] %}
dotfiles-{{ user }}-{{ file }}-link:
file.symlink:
{%- if user == "root" %}
- name: /{{ user }}/{{ file }}
{%- else %}
- name: /home/{{ user }}/{{ file }}
{%- endif %}
- target: /git/wicksy/configfiles/dotfiles/{{ file }}
- force: True
- user: wicksy
- group: wicksy
- mode: 644
- require:
- sls: users
{% endfor %}
{% endfor %}
| Rework dotfiles to pull via ssh and into /git/wicksy not wicksy /root | Rework dotfiles to pull via ssh and into /git/wicksy not wicksy /root
| SaltStack | mit | wicksy/laptop-build,wicksy/laptop-build,wicksy/laptop-build,wicksy/laptop-build | saltstack | ## Code Before:
dotfiles-directory:
file.directory:
- user: wicksy
- group: wicksy
- mode: 755
- makedirs : True
- require:
- sls: users
- names:
- /home/wicksy/git
- /home/wicksy/git/configfiles
dotgiles-github:
git.latest:
- name: https://github.com/wicksy/configfiles.git
- branch: master
- user: wicksy
- target: /home/wicksy/git/configfiles
- require:
- file: dotfiles-directory
{%- for user in ['root','wicksy'] %}
{%- for file in ['.vimrc', '.vim', '.bashrc'] %}
dotfiles-{{ user }}-{{ file }}-link:
file.symlink:
{%- if user == "root" %}
- name: /{{ user }}/{{ file }}
{%- else %}
- name: /home/{{ user }}/{{ file }}
{%- endif %}
- target: /home/wicksy/git/configfiles/dotfiles/{{ file }}
- force: True
- user: wicksy
- group: wicksy
- mode: 644
{% endfor %}
{% endfor %}
## Instruction:
Rework dotfiles to pull via ssh and into /git/wicksy not wicksy /root
## Code After:
dotfiles-directory:
file.directory:
- user: root
- group: root
- mode: 755
- makedirs: True
- recurse:
- user
- group
- names:
- /git
- /git/wicksy
dotgiles-github:
git.latest:
- name: git@github.com:wicksy/configfiles.git
- branch: master
- user: root
- target: /git/wicksy/configfiles
- require:
- file: dotfiles-directory
{%- for user in ['root','wicksy'] %}
{%- for file in ['.vimrc', '.vim', '.bashrc'] %}
dotfiles-{{ user }}-{{ file }}-link:
file.symlink:
{%- if user == "root" %}
- name: /{{ user }}/{{ file }}
{%- else %}
- name: /home/{{ user }}/{{ file }}
{%- endif %}
- target: /git/wicksy/configfiles/dotfiles/{{ file }}
- force: True
- user: wicksy
- group: wicksy
- mode: 644
- require:
- sls: users
{% endfor %}
{% endfor %}
|
027d005057d1c1e3ab000481d27aeb60039c943c | resources/views/partials/inputs/tags.blade.php | resources/views/partials/inputs/tags.blade.php | @section("field")
<div class="select2-tags">
<select name="{{ $prefixed_field_name }}[]" class="form-control" id="{{ $field_id }}" multiple>
@foreach((array) $field_value as $value)
<option value="{{ $value }}" selected>{{ $value }}</option>
@endforeach
</select>
</div>
@overwrite
@section('footer.js')
@parent
<script>
$(function(){
$('#{{ $field_id }}').select2({
tags: true,
multiple: true,
//dropdownCssClass: 'hide',
{!! app('soda.form')->buildJsParams($field_parameters['settings']) !!}
});
});
</script>
@stop
| @section("field")
<div class="select2-tags">
<select name="{{ $prefixed_field_name }}[]" class="form-control" id="{{ $field_id }}" multiple>
@foreach((array) $field_value as $value)
<option value="{{ $value }}" selected>{{ $value }}</option>
@endforeach
</select>
</div>
@overwrite
@section('footer.js')
@parent
<script>
$(function(){
$('#{{ $field_id }}').select2({
tags: true,
multiple: true,
selectOnClose: true,
matcher: function(searchParams, data) {
// This bit taken from Select2's default matcher
var match = $.extend(true, {}, data);
// Don't partial match tags, otherwise if a user has a tag 'abc' it is
// impossible to then create a tag 'ab'.
if (searchParams.term === data.text)
return match;
return null;
},
dropdownCssClass: 'hide',
{!! app('soda.form')->buildJsParams($field_parameters['settings']) !!}
});
$('#{{ $field_id }}').on('select2:unselect', function(e){
$(e.params.data.element).remove();
});
});
</script>
@stop
| Fix select2 tags, keeping the dropdown hidden | Fix select2 tags, keeping the dropdown hidden
| PHP | mit | sodacms/sodacms,soda-framework/cms,sodacms/sodacms,soda-framework/cms,sodacms/sodacms,sodacms/sodacms,soda-framework/cms,sodacms/sodacms | php | ## Code Before:
@section("field")
<div class="select2-tags">
<select name="{{ $prefixed_field_name }}[]" class="form-control" id="{{ $field_id }}" multiple>
@foreach((array) $field_value as $value)
<option value="{{ $value }}" selected>{{ $value }}</option>
@endforeach
</select>
</div>
@overwrite
@section('footer.js')
@parent
<script>
$(function(){
$('#{{ $field_id }}').select2({
tags: true,
multiple: true,
//dropdownCssClass: 'hide',
{!! app('soda.form')->buildJsParams($field_parameters['settings']) !!}
});
});
</script>
@stop
## Instruction:
Fix select2 tags, keeping the dropdown hidden
## Code After:
@section("field")
<div class="select2-tags">
<select name="{{ $prefixed_field_name }}[]" class="form-control" id="{{ $field_id }}" multiple>
@foreach((array) $field_value as $value)
<option value="{{ $value }}" selected>{{ $value }}</option>
@endforeach
</select>
</div>
@overwrite
@section('footer.js')
@parent
<script>
$(function(){
$('#{{ $field_id }}').select2({
tags: true,
multiple: true,
selectOnClose: true,
matcher: function(searchParams, data) {
// This bit taken from Select2's default matcher
var match = $.extend(true, {}, data);
// Don't partial match tags, otherwise if a user has a tag 'abc' it is
// impossible to then create a tag 'ab'.
if (searchParams.term === data.text)
return match;
return null;
},
dropdownCssClass: 'hide',
{!! app('soda.form')->buildJsParams($field_parameters['settings']) !!}
});
$('#{{ $field_id }}').on('select2:unselect', function(e){
$(e.params.data.element).remove();
});
});
</script>
@stop
|
94a106b27ca0e76ed4f7d4f3102ac152aaad1b62 | lib/with_model.rb | lib/with_model.rb | require 'with_model/model'
require 'with_model/model/dsl'
require 'with_model/table'
require 'with_model/version'
module WithModel
def with_model(name, options = {}, &block)
options = options.dup
scope = options.delete(:scope) { :each }
model = Model.new name, options
dsl = Model::DSL.new model
dsl.instance_exec(&block) if block
before scope do
model.create
end
after scope do
model.destroy
end
end
def with_table(name, options = {}, &block)
options = options.dup
scope = options.delete(:scope) { :each }
table = Table.new name, options, &block
before scope do
table.create
end
after scope do
table.destroy
end
end
end
| require 'with_model/model'
require 'with_model/model/dsl'
require 'with_model/table'
require 'with_model/version'
module WithModel
def with_model(name, options = {}, &block)
options = options.dup
scope = options.delete(:scope)
model = Model.new name, options
dsl = Model::DSL.new model
dsl.instance_exec(&block) if block
before(*scope) do
model.create
end
after(*scope) do
model.destroy
end
end
def with_table(name, options = {}, &block)
options = options.dup
scope = options.delete(:scope)
table = Table.new name, options, &block
before(*scope) do
table.create
end
after(*scope) do
table.destroy
end
end
end
| Allow before/after to naturally use their own defaults | Allow before/after to naturally use their own defaults
Instead of explicitly specifying that it should be :each.
(#18)
| Ruby | mit | Casecommons/with_model | ruby | ## Code Before:
require 'with_model/model'
require 'with_model/model/dsl'
require 'with_model/table'
require 'with_model/version'
module WithModel
def with_model(name, options = {}, &block)
options = options.dup
scope = options.delete(:scope) { :each }
model = Model.new name, options
dsl = Model::DSL.new model
dsl.instance_exec(&block) if block
before scope do
model.create
end
after scope do
model.destroy
end
end
def with_table(name, options = {}, &block)
options = options.dup
scope = options.delete(:scope) { :each }
table = Table.new name, options, &block
before scope do
table.create
end
after scope do
table.destroy
end
end
end
## Instruction:
Allow before/after to naturally use their own defaults
Instead of explicitly specifying that it should be :each.
(#18)
## Code After:
require 'with_model/model'
require 'with_model/model/dsl'
require 'with_model/table'
require 'with_model/version'
module WithModel
def with_model(name, options = {}, &block)
options = options.dup
scope = options.delete(:scope)
model = Model.new name, options
dsl = Model::DSL.new model
dsl.instance_exec(&block) if block
before(*scope) do
model.create
end
after(*scope) do
model.destroy
end
end
def with_table(name, options = {}, &block)
options = options.dup
scope = options.delete(:scope)
table = Table.new name, options, &block
before(*scope) do
table.create
end
after(*scope) do
table.destroy
end
end
end
|
67e89526d5ff95902f98aa9a671d1681eeee2f9d | cas-mfa-java/README.md | cas-mfa-java/README.md |
This module is for the Java source code of the multifactor authentication extensions for CAS.
It builds to a .jar.
This module is intended to include all the Java you need to add to a CAS implementation to take advantage of the extended multifactor authentication features in this project.
Of course, it's kind of useless all by itself, since you also need Web application components, which live in the parallel `cas-mfa-web` directory. Those Web components depend on these Java components.
This is kind of complicated and may not be the final answer here.
## How do I build it?
*Good question*. To be defined and documented. |
This module is for the Java source code of the multifactor authentication extensions for CAS.
It builds to a .jar.
This module is intended to include all the Java you need to add to a CAS implementation to take advantage of the extended multifactor authentication features in this project.
Of course, it's kind of useless all by itself, since you also need Web application components, which live in the parallel `cas-mfa-web` directory. Those Web components depend on these Java components.
This is kind of complicated and may not be the final answer here.
## How do I build it?
In this directory, run
mvn package
This will yield a `target` directory containing, among other artifacts, a `cas-mfa-java-{VERSION}.jar`, where {VERSION} is, as of this writing, "0.0.1-SNAPSHOT". As in, `cas-mfa-java-0.0.1-SNAPSHOT.jar`.
You'd then include that .jar in an application, e.g. by declaring it as a Maven dependency in a `pom.xml`.
The `cas-mfa-web` project does this, and the top level (up one directory) `pom.xml` automates first building this .jar and then making use of it in the other (i.e., .war) artifacts it builds.
| Document how to build cas-mfa-java | Document how to build cas-mfa-java
| Markdown | apache-2.0 | BYU-OIT/cas-mfa,Unicon/cas-mfa,byu-oit-appdev/cas-mfa,bzfoster/cas-mfa,bzfoster/cas-mfa,bzfoster/cas-mfa,BYU-OIT/cas-mfa,byu-oit-appdev/cas-mfa,Unicon/cas-mfa,BYU-OIT/cas-mfa,Unicon/cas-mfa,byu-oit-appdev/cas-mfa | markdown | ## Code Before:
This module is for the Java source code of the multifactor authentication extensions for CAS.
It builds to a .jar.
This module is intended to include all the Java you need to add to a CAS implementation to take advantage of the extended multifactor authentication features in this project.
Of course, it's kind of useless all by itself, since you also need Web application components, which live in the parallel `cas-mfa-web` directory. Those Web components depend on these Java components.
This is kind of complicated and may not be the final answer here.
## How do I build it?
*Good question*. To be defined and documented.
## Instruction:
Document how to build cas-mfa-java
## Code After:
This module is for the Java source code of the multifactor authentication extensions for CAS.
It builds to a .jar.
This module is intended to include all the Java you need to add to a CAS implementation to take advantage of the extended multifactor authentication features in this project.
Of course, it's kind of useless all by itself, since you also need Web application components, which live in the parallel `cas-mfa-web` directory. Those Web components depend on these Java components.
This is kind of complicated and may not be the final answer here.
## How do I build it?
In this directory, run
mvn package
This will yield a `target` directory containing, among other artifacts, a `cas-mfa-java-{VERSION}.jar`, where {VERSION} is, as of this writing, "0.0.1-SNAPSHOT". As in, `cas-mfa-java-0.0.1-SNAPSHOT.jar`.
You'd then include that .jar in an application, e.g. by declaring it as a Maven dependency in a `pom.xml`.
The `cas-mfa-web` project does this, and the top level (up one directory) `pom.xml` automates first building this .jar and then making use of it in the other (i.e., .war) artifacts it builds.
|
3e5843c00cab61d8ad51eef05d72c44f7be96e8f | package.json | package.json | {
"name": "abc-environment",
"version": "1.0.2",
"description": "Get started with modern JavaScript development as simple as the ABC",
"main": "src/index.js",
"bin": {
"abc": "src/index.js"
},
"author": "queicherius@gmail.com",
"license": "MIT",
"repository": {
"type": "git",
"url": "https://github.com/queicherius/abc-environment"
},
"dependencies": {
"babel-cli": "^6.11.4",
"babel-eslint": "^6.1.2",
"babel-plugin-istanbul": "^2.0.0",
"babel-plugin-rewire": "^1.0.0",
"babel-preset-latest": "^6.16.0",
"babel-preset-stage-0": "^6.5.0",
"chai": "^3.5.0",
"chalk": "^1.1.3",
"codecov": "^1.0.1",
"exec-sh": "^0.2.0",
"mkdirp": "^0.5.1",
"mocha": "^3.0.2",
"nyc": "^8.1.0",
"snazzy": "^4.0.1"
}
}
| {
"name": "abc-environment",
"version": "1.0.2",
"description": "Get started with modern JavaScript development as simple as the ABC",
"main": "src/index.js",
"bin": {
"abc": "src/index.js"
},
"author": "queicherius@gmail.com",
"license": "MIT",
"repository": {
"type": "git",
"url": "https://github.com/queicherius/abc-environment"
},
"dependencies": {
"babel-cli": "6.16.0",
"babel-eslint": "7.0.0",
"babel-plugin-istanbul": "2.0.2",
"babel-plugin-rewire": "1.0.0",
"babel-preset-latest": "6.16.0",
"babel-preset-stage-0": "6.16.0",
"chai": "3.5.0",
"chalk": "1.1.3",
"codecov": "1.0.1",
"exec-sh": "0.2.0",
"mkdirp": "0.5.1",
"mocha": "3.1.0",
"nyc": "8.3.0",
"snazzy": "5.0.0"
}
}
| Make sure the dependencies dont break | Make sure the dependencies dont break
| JSON | mit | queicherius/abc-environment,queicherius/abc-environment | json | ## Code Before:
{
"name": "abc-environment",
"version": "1.0.2",
"description": "Get started with modern JavaScript development as simple as the ABC",
"main": "src/index.js",
"bin": {
"abc": "src/index.js"
},
"author": "queicherius@gmail.com",
"license": "MIT",
"repository": {
"type": "git",
"url": "https://github.com/queicherius/abc-environment"
},
"dependencies": {
"babel-cli": "^6.11.4",
"babel-eslint": "^6.1.2",
"babel-plugin-istanbul": "^2.0.0",
"babel-plugin-rewire": "^1.0.0",
"babel-preset-latest": "^6.16.0",
"babel-preset-stage-0": "^6.5.0",
"chai": "^3.5.0",
"chalk": "^1.1.3",
"codecov": "^1.0.1",
"exec-sh": "^0.2.0",
"mkdirp": "^0.5.1",
"mocha": "^3.0.2",
"nyc": "^8.1.0",
"snazzy": "^4.0.1"
}
}
## Instruction:
Make sure the dependencies dont break
## Code After:
{
"name": "abc-environment",
"version": "1.0.2",
"description": "Get started with modern JavaScript development as simple as the ABC",
"main": "src/index.js",
"bin": {
"abc": "src/index.js"
},
"author": "queicherius@gmail.com",
"license": "MIT",
"repository": {
"type": "git",
"url": "https://github.com/queicherius/abc-environment"
},
"dependencies": {
"babel-cli": "6.16.0",
"babel-eslint": "7.0.0",
"babel-plugin-istanbul": "2.0.2",
"babel-plugin-rewire": "1.0.0",
"babel-preset-latest": "6.16.0",
"babel-preset-stage-0": "6.16.0",
"chai": "3.5.0",
"chalk": "1.1.3",
"codecov": "1.0.1",
"exec-sh": "0.2.0",
"mkdirp": "0.5.1",
"mocha": "3.1.0",
"nyc": "8.3.0",
"snazzy": "5.0.0"
}
}
|
f03a2f0514aecd15676208788c6106ff2c14bc0e | circle.yml | circle.yml | machine:
node:
version: 7.9.0
dependencies:
override:
- npm install -g istanbul
- npm install -g yarn
- yarn install
general:
artifacts:
- ./coverage
deployment:
dev:
branch: dev
commands:
- git config user.email "web.jam.adm@gmail.com"
- git config user.name "circleci"
- "[[ ! -s \"$(git rev-parse --git-dir)/shallow\" ]] || git fetch --unshallow"
- git commit --allow-empty -m "Force heroku dev rebuild"
- git push -f git@heroku.com:web-jam-back-dev.git HEAD:master
master:
branch: master
commands:
- git config user.email "web.jam.adm@gmail.com"
- git config user.name "circleci"
- "[[ ! -s \"$(git rev-parse --git-dir)/shallow\" ]] || git fetch --unshallow"
- git commit --allow-empty -m "Force heroku master rebuild"
- git push -f git@heroku.com:webjamsalem.git HEAD:master
| machine:
node:
version: 7.9.0
dependencies:
pre:
- npm install -g istanbul yarn codeclimate-test-reporter
override:
- yarn install
test:
post:
- codeclimate-test-reporter < coverage/lcov.info
general:
artifacts:
- ./coverage
deployment:
dev:
branch: dev
commands:
- git config user.email "web.jam.adm@gmail.com"
- git config user.name "circleci"
- "[[ ! -s \"$(git rev-parse --git-dir)/shallow\" ]] || git fetch --unshallow"
- git commit --allow-empty -m "Force heroku dev rebuild"
- git push -f git@heroku.com:web-jam-back-dev.git HEAD:master
master:
branch: master
commands:
- git config user.email "web.jam.adm@gmail.com"
- git config user.name "circleci"
- "[[ ! -s \"$(git rev-parse --git-dir)/shallow\" ]] || git fetch --unshallow"
- git commit --allow-empty -m "Force heroku master rebuild"
- git push -f git@heroku.com:webjamsalem.git HEAD:master
| Add codeclimate test coverage reporting | Add codeclimate test coverage reporting
| YAML | mit | WebJamApps/web-jam-back,WebJamApps/web-jam-back | yaml | ## Code Before:
machine:
node:
version: 7.9.0
dependencies:
override:
- npm install -g istanbul
- npm install -g yarn
- yarn install
general:
artifacts:
- ./coverage
deployment:
dev:
branch: dev
commands:
- git config user.email "web.jam.adm@gmail.com"
- git config user.name "circleci"
- "[[ ! -s \"$(git rev-parse --git-dir)/shallow\" ]] || git fetch --unshallow"
- git commit --allow-empty -m "Force heroku dev rebuild"
- git push -f git@heroku.com:web-jam-back-dev.git HEAD:master
master:
branch: master
commands:
- git config user.email "web.jam.adm@gmail.com"
- git config user.name "circleci"
- "[[ ! -s \"$(git rev-parse --git-dir)/shallow\" ]] || git fetch --unshallow"
- git commit --allow-empty -m "Force heroku master rebuild"
- git push -f git@heroku.com:webjamsalem.git HEAD:master
## Instruction:
Add codeclimate test coverage reporting
## Code After:
machine:
node:
version: 7.9.0
dependencies:
pre:
- npm install -g istanbul yarn codeclimate-test-reporter
override:
- yarn install
test:
post:
- codeclimate-test-reporter < coverage/lcov.info
general:
artifacts:
- ./coverage
deployment:
dev:
branch: dev
commands:
- git config user.email "web.jam.adm@gmail.com"
- git config user.name "circleci"
- "[[ ! -s \"$(git rev-parse --git-dir)/shallow\" ]] || git fetch --unshallow"
- git commit --allow-empty -m "Force heroku dev rebuild"
- git push -f git@heroku.com:web-jam-back-dev.git HEAD:master
master:
branch: master
commands:
- git config user.email "web.jam.adm@gmail.com"
- git config user.name "circleci"
- "[[ ! -s \"$(git rev-parse --git-dir)/shallow\" ]] || git fetch --unshallow"
- git commit --allow-empty -m "Force heroku master rebuild"
- git push -f git@heroku.com:webjamsalem.git HEAD:master
|
b573d5ad08cfc3a909d62aa9464bfdc4b97c452a | config/config.go | config/config.go | package config
import (
"encoding/json"
"fmt"
"io/ioutil"
"log"
"os"
)
type Configuration struct {
Username string `json:"username"`
Password string
RememberMe bool `json:"remember_me"`
}
func configWizard() *Configuration {
configuration := new(Configuration)
fmt.Println("Welcome to gotify !\nThis wizard will help you set up gotify, follow it carefully !")
StartWizard(configuration)
return configuration
}
func LoadConfig() *Configuration {
if _, err := os.Stat("config.json"); os.IsNotExist(err) {
configuration := configWizard()
return configuration
}
file, err := ioutil.ReadFile("config.json")
if err != nil {
log.Fatal(err)
}
configuration := new(Configuration)
err = json.Unmarshal(file, &configuration)
if err != nil {
log.Fatal(err)
}
return configuration
}
| package config
import (
"encoding/json"
"fmt"
"io/ioutil"
"log"
"os"
)
type Configuration struct {
Username string `json:"username"`
Password string
RememberMe bool `json:"remember_me"`
}
// Starts the wizard config
func configWizard() *Configuration {
configuration := new(Configuration)
fmt.Println("Welcome to gotify !\nThis wizard will help you set up gotify, follow it carefully !")
StartWizard(configuration)
err := saveConfig(configuration)
if err != nil {
log.Fatal(err)
}
return configuration
}
// Save the configuration in the config.json file
func saveConfig(configuration *Configuration) error {
config, err := json.Marshal(configuration)
if err != nil {
return err
}
err = ioutil.WriteFile("config.json", config, 0644)
return err
}
// Load the configuration from config.json or launch the wizard if it does not exists
func LoadConfig() *Configuration {
if _, err := os.Stat("config.json"); os.IsNotExist(err) {
configuration := configWizard()
return configuration
}
file, err := ioutil.ReadFile("config.json")
if err != nil {
log.Fatal(err)
}
configuration := new(Configuration)
err = json.Unmarshal(file, &configuration)
if err != nil {
log.Fatal(err)
}
return configuration
}
| Add some docstring and the saveConfiguration method | Add some docstring and the saveConfiguration method
| Go | mit | adrien-f/gotify | go | ## Code Before:
package config
import (
"encoding/json"
"fmt"
"io/ioutil"
"log"
"os"
)
type Configuration struct {
Username string `json:"username"`
Password string
RememberMe bool `json:"remember_me"`
}
func configWizard() *Configuration {
configuration := new(Configuration)
fmt.Println("Welcome to gotify !\nThis wizard will help you set up gotify, follow it carefully !")
StartWizard(configuration)
return configuration
}
func LoadConfig() *Configuration {
if _, err := os.Stat("config.json"); os.IsNotExist(err) {
configuration := configWizard()
return configuration
}
file, err := ioutil.ReadFile("config.json")
if err != nil {
log.Fatal(err)
}
configuration := new(Configuration)
err = json.Unmarshal(file, &configuration)
if err != nil {
log.Fatal(err)
}
return configuration
}
## Instruction:
Add some docstring and the saveConfiguration method
## Code After:
package config
import (
"encoding/json"
"fmt"
"io/ioutil"
"log"
"os"
)
type Configuration struct {
Username string `json:"username"`
Password string
RememberMe bool `json:"remember_me"`
}
// Starts the wizard config
func configWizard() *Configuration {
configuration := new(Configuration)
fmt.Println("Welcome to gotify !\nThis wizard will help you set up gotify, follow it carefully !")
StartWizard(configuration)
err := saveConfig(configuration)
if err != nil {
log.Fatal(err)
}
return configuration
}
// Save the configuration in the config.json file
func saveConfig(configuration *Configuration) error {
config, err := json.Marshal(configuration)
if err != nil {
return err
}
err = ioutil.WriteFile("config.json", config, 0644)
return err
}
// Load the configuration from config.json or launch the wizard if it does not exists
func LoadConfig() *Configuration {
if _, err := os.Stat("config.json"); os.IsNotExist(err) {
configuration := configWizard()
return configuration
}
file, err := ioutil.ReadFile("config.json")
if err != nil {
log.Fatal(err)
}
configuration := new(Configuration)
err = json.Unmarshal(file, &configuration)
if err != nil {
log.Fatal(err)
}
return configuration
}
|
28bbf60b5c157e9c6405db1a80de5e05f42cc10a | page/4about.md | page/4about.md | ---
layout: page
title: About
permalink: /about/
icon: heart
type: page
---
* content
{:toc}
## Briefs of An Engineer
I'm ZWF
-----
Research engineer and Team Leader(2017.4-Now)
-----
Tsinghua Tongfang Knowledge Network Technology Co., Ltd. (Beijing)
## Beliefs of The engineer
Life is beautiful!
----
Wings up! Rock & roll!
## Comments
Yeah, actually there is nothing to comment.
| ---
layout: page
title: About
permalink: /about/
icon: heart
type: page
---
## Briefs of An Engineer
>Research engineer/Team Leader(2017.4-Now)
>Tsinghua Tongfang Knowledge Network Technology Co., Ltd. (Beijing)
## Beliefs of The engineer
>Life is beautiful!
>Wings up! Rock & roll!
## Comments
>Yeah, actually there is nothing to comment.
| CHANGE THE INFO ABOUT MYSELF | CHANGE THE INFO ABOUT MYSELF
| Markdown | mit | zhangwenfan123/zwf,zhangwenfan123/zwf | markdown | ## Code Before:
---
layout: page
title: About
permalink: /about/
icon: heart
type: page
---
* content
{:toc}
## Briefs of An Engineer
I'm ZWF
-----
Research engineer and Team Leader(2017.4-Now)
-----
Tsinghua Tongfang Knowledge Network Technology Co., Ltd. (Beijing)
## Beliefs of The engineer
Life is beautiful!
----
Wings up! Rock & roll!
## Comments
Yeah, actually there is nothing to comment.
## Instruction:
CHANGE THE INFO ABOUT MYSELF
## Code After:
---
layout: page
title: About
permalink: /about/
icon: heart
type: page
---
## Briefs of An Engineer
>Research engineer/Team Leader(2017.4-Now)
>Tsinghua Tongfang Knowledge Network Technology Co., Ltd. (Beijing)
## Beliefs of The engineer
>Life is beautiful!
>Wings up! Rock & roll!
## Comments
>Yeah, actually there is nothing to comment.
|
15b3e34cb731762b4b861ffda6b9a12b2855474b | lib/sous_chef/recipe.rb | lib/sous_chef/recipe.rb | module SousChef
class Recipe
def initialize(*flags, &block)
@flags = flags
@resources = []
instance_eval(&block)
end
def to_script
@resources.map do |resource|
@context = resource
script = ""
script << %{# #{resource.name}\n} if verbose? && resource.name
script << resource.to_script
end.join("\n\n")
end
def verbose?
@flags.include?(:verbose)
end
def execute(name = nil, &block)
@resources << Resource::Execute.new(self, name, &block)
end
protected
def method_missing(meth, *args, &block)
if @context && @context.resource_respond_to?(meth)
@context.__send__(meth, *args, &block)
else
super
end
end
end
end
| module SousChef
class Recipe
def initialize(*flags, &block)
@flags = flags
@resources = []
instance_eval(&block)
end
def to_script
@resources.map do |resource|
@context = resource
script = ""
script << %{# #{resource.name}\n} if verbose? && resource.name
script << resource.to_script
end.join("\n\n")
end
def verbose?
@flags.include?(:verbose)
end
def execute(*args, &block)
@resources << Resource::Execute.new(self, *args, &block)
end
def file(*args, &block)
@resources << Resource::File.new(self, *args, &block)
end
def directory(*args, &block)
@resources << Resource::Directory.new(self, *args, &block)
end
end
protected
def method_missing(meth, *args, &block)
if @context && @context.resource_respond_to?(meth)
@context.__send__(meth, *args, &block)
else
super
end
end
end
end
| Add file and directory methods to Recipe. | Add file and directory methods to Recipe.
| Ruby | mit | engineyard/sous_chef,engineyard/sous_chef | ruby | ## Code Before:
module SousChef
class Recipe
def initialize(*flags, &block)
@flags = flags
@resources = []
instance_eval(&block)
end
def to_script
@resources.map do |resource|
@context = resource
script = ""
script << %{# #{resource.name}\n} if verbose? && resource.name
script << resource.to_script
end.join("\n\n")
end
def verbose?
@flags.include?(:verbose)
end
def execute(name = nil, &block)
@resources << Resource::Execute.new(self, name, &block)
end
protected
def method_missing(meth, *args, &block)
if @context && @context.resource_respond_to?(meth)
@context.__send__(meth, *args, &block)
else
super
end
end
end
end
## Instruction:
Add file and directory methods to Recipe.
## Code After:
module SousChef
class Recipe
def initialize(*flags, &block)
@flags = flags
@resources = []
instance_eval(&block)
end
def to_script
@resources.map do |resource|
@context = resource
script = ""
script << %{# #{resource.name}\n} if verbose? && resource.name
script << resource.to_script
end.join("\n\n")
end
def verbose?
@flags.include?(:verbose)
end
def execute(*args, &block)
@resources << Resource::Execute.new(self, *args, &block)
end
def file(*args, &block)
@resources << Resource::File.new(self, *args, &block)
end
def directory(*args, &block)
@resources << Resource::Directory.new(self, *args, &block)
end
end
protected
def method_missing(meth, *args, &block)
if @context && @context.resource_respond_to?(meth)
@context.__send__(meth, *args, &block)
else
super
end
end
end
end
|
baebe45349b6e7daa0ecdcc42d97066792dbefd3 | etc/phantomjs/exec.js | etc/phantomjs/exec.js | /*
*
*/
var cordova = require('cordova');
var execProxy = require('cordova/exec/proxy');
module.exports = function (success, fail, service, action, args) {
if ((service === 'Device') && (action === 'getDeviceInfo')) {
success({
platform: 'PhantomJS',
version: '1.9.7',
uuid: 'BD905752-CCBB-4C35-BF7D-178E2F7930B8',
model: ''
});
} else if (success) {
success();
}
};
| /*
*
*/
var cordova = require('cordova');
var execProxy = require('cordova/exec/proxy');
module.exports = function (success, fail, service, action, args) {
if ((service === 'Device') && (action === 'getDeviceInfo')) {
success({
platform: 'PhantomJS',
version: '1.9.7',
uuid: 'BD905752-CCBB-4C35-BF7D-178E2F7930B8',
model: ''
});
} else if ((service === 'NetworkStatus') && (action === 'getConnectionInfo')) {
success('wifi');
} else if (success) {
success();
}
};
| Return wifi as network status. | Return wifi as network status.
| JavaScript | mit | TeamPraxis/grunt-steroids-setup | javascript | ## Code Before:
/*
*
*/
var cordova = require('cordova');
var execProxy = require('cordova/exec/proxy');
module.exports = function (success, fail, service, action, args) {
if ((service === 'Device') && (action === 'getDeviceInfo')) {
success({
platform: 'PhantomJS',
version: '1.9.7',
uuid: 'BD905752-CCBB-4C35-BF7D-178E2F7930B8',
model: ''
});
} else if (success) {
success();
}
};
## Instruction:
Return wifi as network status.
## Code After:
/*
*
*/
var cordova = require('cordova');
var execProxy = require('cordova/exec/proxy');
module.exports = function (success, fail, service, action, args) {
if ((service === 'Device') && (action === 'getDeviceInfo')) {
success({
platform: 'PhantomJS',
version: '1.9.7',
uuid: 'BD905752-CCBB-4C35-BF7D-178E2F7930B8',
model: ''
});
} else if ((service === 'NetworkStatus') && (action === 'getConnectionInfo')) {
success('wifi');
} else if (success) {
success();
}
};
|
75c1dedb6eddfcb540ee29de5ae31b99d9927d07 | reddit/admin.py | reddit/admin.py | from django.contrib import admin
from reddit.models import RedditAccount
from reddit.forms import RedditAccountForm
from datetime import date
class RedditAccountAdmin(admin.ModelAdmin):
list_display = ('username', 'user', 'date_created', 'link_karma', 'comment_karma', 'last_update', 'is_valid')
search_fields = ['username', 'user']
fields = ('user', 'username')
form = RedditAccountForm
def is_valid(self, obj):
if not obj.date_created:
return False
# Account 3 months old?
if (date.today() - obj.date_created.date()).days >= 90:
return True
# Account created after 9/2/10 and before 13/2/10
if obj.date_created.date() >= date(2010, 2, 9) and obj.date_created.date() <= date(2010, 2, 13):
return True
return False
is_valid.short_description = 'Dreddit Eligible'
is_valid.boolean = True
def save_model(self, request, obj, form, change):
obj.api_update()
obj.save()
admin.site.register(RedditAccount, RedditAccountAdmin)
| from django.contrib import admin
from reddit.models import RedditAccount
from reddit.forms import RedditAccountForm
from datetime import date
class RedditAccountAdmin(admin.ModelAdmin):
list_display = ('username', 'user', 'date_created', 'link_karma', 'comment_karma', 'last_update', 'validated', 'is_valid')
search_fields = ['username']
fields = ('user', 'username')
form = RedditAccountForm
def is_valid(self, obj):
if not obj.date_created:
return False
# Account 3 months old?
if (date.today() - obj.date_created.date()).days >= 90:
return True
# Account created after 9/2/10 and before 13/2/10
if obj.date_created.date() >= date(2010, 2, 9) and obj.date_created.date() <= date(2010, 2, 13):
return True
return False
is_valid.short_description = 'Dreddit Eligible'
is_valid.boolean = True
def save_model(self, request, obj, form, change):
obj.api_update()
obj.save()
admin.site.register(RedditAccount, RedditAccountAdmin)
| Add validation details to the Admin interface | Add validation details to the Admin interface
| Python | bsd-3-clause | nikdoof/test-auth | python | ## Code Before:
from django.contrib import admin
from reddit.models import RedditAccount
from reddit.forms import RedditAccountForm
from datetime import date
class RedditAccountAdmin(admin.ModelAdmin):
list_display = ('username', 'user', 'date_created', 'link_karma', 'comment_karma', 'last_update', 'is_valid')
search_fields = ['username', 'user']
fields = ('user', 'username')
form = RedditAccountForm
def is_valid(self, obj):
if not obj.date_created:
return False
# Account 3 months old?
if (date.today() - obj.date_created.date()).days >= 90:
return True
# Account created after 9/2/10 and before 13/2/10
if obj.date_created.date() >= date(2010, 2, 9) and obj.date_created.date() <= date(2010, 2, 13):
return True
return False
is_valid.short_description = 'Dreddit Eligible'
is_valid.boolean = True
def save_model(self, request, obj, form, change):
obj.api_update()
obj.save()
admin.site.register(RedditAccount, RedditAccountAdmin)
## Instruction:
Add validation details to the Admin interface
## Code After:
from django.contrib import admin
from reddit.models import RedditAccount
from reddit.forms import RedditAccountForm
from datetime import date
class RedditAccountAdmin(admin.ModelAdmin):
list_display = ('username', 'user', 'date_created', 'link_karma', 'comment_karma', 'last_update', 'validated', 'is_valid')
search_fields = ['username']
fields = ('user', 'username')
form = RedditAccountForm
def is_valid(self, obj):
if not obj.date_created:
return False
# Account 3 months old?
if (date.today() - obj.date_created.date()).days >= 90:
return True
# Account created after 9/2/10 and before 13/2/10
if obj.date_created.date() >= date(2010, 2, 9) and obj.date_created.date() <= date(2010, 2, 13):
return True
return False
is_valid.short_description = 'Dreddit Eligible'
is_valid.boolean = True
def save_model(self, request, obj, form, change):
obj.api_update()
obj.save()
admin.site.register(RedditAccount, RedditAccountAdmin)
|
2ae87c4cd168c534d98e7a2ad96d13078788ae6b | widgets/number_with_target/number_with_target.html | widgets/number_with_target/number_with_target.html | <a data-bind-href="link">
<div class="title-wrapper">
<h1 class="title" data-bind="title"></h1>
</div>
<div class="number-with-target">
<h2 class="value">
<p>
YTD:
<span data-bind="current | shortenedNumber | prepend prefix | append suffix"></span> /
<span data-bind="ytd_target | shortenedNumber | prepend prefix | append suffix"></span>
</p><p>
ANN:
<span data-bind="current | shortenedNumber | prepend prefix | append suffix"></span> /
<span data-bind="annual_target | shortenedNumber | prepend prefix | append suffix"></span>
</p>
</h2>
</div>
<p class="more-info" data-bind="moreinfo | raw"></p>
<p class="updated-at" data-bind="updatedAtMessage"></p>
</a> | <a data-bind-href="link">
<div class="title-wrapper">
<h1 class="title" data-bind="title"></h1>
</div>
<div class="number-with-target">
<h2 class="value">
<!--<p>
YTD:
<span data-bind="current | shortenedNumber | prepend prefix | append suffix"></span> /
<span data-bind="ytd_target | shortenedNumber | prepend prefix | append suffix"></span>
</p>--><p>
<span data-bind="current | shortenedNumber | prepend prefix | append suffix"></span> /
<span data-bind="annual_target | shortenedNumber | prepend prefix | append suffix"></span>
</p>
</h2>
</div>
<p class="more-info" data-bind="moreinfo | raw"></p>
<p class="updated-at" data-bind="updatedAtMessage"></p>
</a> | Remove YTD targets for now | Remove YTD targets for now
For https://github.com/theodi/shared/issues/164
| HTML | mit | theodi/dashboards,theodi/dashboards,theodi/dashboards | html | ## Code Before:
<a data-bind-href="link">
<div class="title-wrapper">
<h1 class="title" data-bind="title"></h1>
</div>
<div class="number-with-target">
<h2 class="value">
<p>
YTD:
<span data-bind="current | shortenedNumber | prepend prefix | append suffix"></span> /
<span data-bind="ytd_target | shortenedNumber | prepend prefix | append suffix"></span>
</p><p>
ANN:
<span data-bind="current | shortenedNumber | prepend prefix | append suffix"></span> /
<span data-bind="annual_target | shortenedNumber | prepend prefix | append suffix"></span>
</p>
</h2>
</div>
<p class="more-info" data-bind="moreinfo | raw"></p>
<p class="updated-at" data-bind="updatedAtMessage"></p>
</a>
## Instruction:
Remove YTD targets for now
For https://github.com/theodi/shared/issues/164
## Code After:
<a data-bind-href="link">
<div class="title-wrapper">
<h1 class="title" data-bind="title"></h1>
</div>
<div class="number-with-target">
<h2 class="value">
<!--<p>
YTD:
<span data-bind="current | shortenedNumber | prepend prefix | append suffix"></span> /
<span data-bind="ytd_target | shortenedNumber | prepend prefix | append suffix"></span>
</p>--><p>
<span data-bind="current | shortenedNumber | prepend prefix | append suffix"></span> /
<span data-bind="annual_target | shortenedNumber | prepend prefix | append suffix"></span>
</p>
</h2>
</div>
<p class="more-info" data-bind="moreinfo | raw"></p>
<p class="updated-at" data-bind="updatedAtMessage"></p>
</a> |
fff0ecbd1f29bb0fbac5c57eaf117b052eda01e8 | _config.yml | _config.yml | markdown: kramdown
highlighter: pygments
url: http://jmcomets.com
lsi: false
exclude: [LICENSE, CNAME, README.md, .gitignore]
# Theme customization - please change from the defaults!
theme:
# Color for top bar, links, etc
highlight_color: '#48257f'
# Profile links on the left sidebar, leave blank to ignore
social:
github: jmcomets
#bitbucket: jmcomets
#hacker_news:
#stackexchange:
stackoverflow: 1441984
twitter: jmcomets
#facebook: jmcomets
#tumblr:
#linkedin: jean-marie-comets
#gplus:
# "Hi, I'm _______"
name: Jean-Marie Comets
email: jm_comets@hotmail.com
tagline: "weak opinions, strongly held"
# Google Analytics key, leave blank to ignore
#google_analytics_key:
# Toggle "Postings are my own" disclaimer in footer
show_disclaimer: true
# vim: ft=yaml et sw=2 sts=2
| markdown: kramdown
highlighter: pygments
url: http://jmcomets.com
lsi: false
exclude: [LICENSE, CNAME, README.md, .gitignore]
# Theme customization - please change from the defaults!
theme:
# Color for top bar, links, etc
highlight_color: '#48257f'
# Profile links on the left sidebar, leave blank to ignore
social:
github: jmcomets
#bitbucket: jmcomets
#hacker_news:
#stackexchange:
stackoverflow: 1441984
twitter: jmcomets
#facebook: jmcomets
#tumblr:
#linkedin: jean-marie-comets
#gplus:
# "Hi, I'm _______"
name: Jean-Marie Comets
email: jm_comets@hotmail.com
tagline: "weak opinions, strongly held"
# Google Analytics key, leave blank to ignore
google_analytics_key: UA-56339121-1
# Toggle "Postings are my own" disclaimer in footer
show_disclaimer: true
# vim: ft=yaml et sw=2 sts=2
| Add google analytics tracking id | Add google analytics tracking id
| YAML | mit | jmcomets/jmcomets.github.io | yaml | ## Code Before:
markdown: kramdown
highlighter: pygments
url: http://jmcomets.com
lsi: false
exclude: [LICENSE, CNAME, README.md, .gitignore]
# Theme customization - please change from the defaults!
theme:
# Color for top bar, links, etc
highlight_color: '#48257f'
# Profile links on the left sidebar, leave blank to ignore
social:
github: jmcomets
#bitbucket: jmcomets
#hacker_news:
#stackexchange:
stackoverflow: 1441984
twitter: jmcomets
#facebook: jmcomets
#tumblr:
#linkedin: jean-marie-comets
#gplus:
# "Hi, I'm _______"
name: Jean-Marie Comets
email: jm_comets@hotmail.com
tagline: "weak opinions, strongly held"
# Google Analytics key, leave blank to ignore
#google_analytics_key:
# Toggle "Postings are my own" disclaimer in footer
show_disclaimer: true
# vim: ft=yaml et sw=2 sts=2
## Instruction:
Add google analytics tracking id
## Code After:
markdown: kramdown
highlighter: pygments
url: http://jmcomets.com
lsi: false
exclude: [LICENSE, CNAME, README.md, .gitignore]
# Theme customization - please change from the defaults!
theme:
# Color for top bar, links, etc
highlight_color: '#48257f'
# Profile links on the left sidebar, leave blank to ignore
social:
github: jmcomets
#bitbucket: jmcomets
#hacker_news:
#stackexchange:
stackoverflow: 1441984
twitter: jmcomets
#facebook: jmcomets
#tumblr:
#linkedin: jean-marie-comets
#gplus:
# "Hi, I'm _______"
name: Jean-Marie Comets
email: jm_comets@hotmail.com
tagline: "weak opinions, strongly held"
# Google Analytics key, leave blank to ignore
google_analytics_key: UA-56339121-1
# Toggle "Postings are my own" disclaimer in footer
show_disclaimer: true
# vim: ft=yaml et sw=2 sts=2
|
4af05f55cae56ab6cd41a49a9d8f6765d637dc30 | tests/tests.lisp | tests/tests.lisp | (cl:defpackage #:enhanced-eval-when_tests
(:use #:cl #:parachute)
(:shadowing-import-from #:enhanced-eval-when #:eval-when))
(cl:in-package #:enhanced-eval-when_tests)
(defmacro test-passthrough (situations)
`(is equal
(macroexpand-1 '(eval-when ,situations do-it))
'(cl:eval-when ,situations do-it)))
(define-test "enhanced-eval-when"
(is equal
(macroexpand-1 '(eval-when t do-it))
'(cl:eval-when (:compile-toplevel :load-toplevel :execute)
do-it))
(test-passthrough '())
(test-passthrough '(:compile-toplevel))
(test-passthrough '(:load-toplevel))
(test-passthrough '(:execute))
(test-passthrough '(:compile-toplevel :load-toplevel))
(test-passthrough '(:load-toplevel :execute))
(test-passthrough '(:compile-toplevel :execute)))
| (cl:defpackage #:enhanced-eval-when_tests
(:use #:cl #:parachute)
(:shadowing-import-from #:enhanced-eval-when #:eval-when))
(cl:in-package #:enhanced-eval-when_tests)
(defmacro test-passthrough (situations)
`(is equal '(cl:eval-when ,situations do-it)
(macroexpand-1 '(eval-when ,situations do-it))))
(define-test "enhanced-eval-when"
(is equal
(macroexpand-1 '(eval-when t do-it))
'(cl:eval-when (:compile-toplevel :load-toplevel :execute)
do-it))
(test-passthrough '())
(test-passthrough '(:compile-toplevel))
(test-passthrough '(:load-toplevel))
(test-passthrough '(:execute))
(test-passthrough '(:compile-toplevel :load-toplevel))
(test-passthrough '(:load-toplevel :execute))
(test-passthrough '(:compile-toplevel :execute)))
| Use proper argument order for IS. | Tests: Use proper argument order for IS.
| Common Lisp | unlicense | Hexstream/enhanced-eval-when | common-lisp | ## Code Before:
(cl:defpackage #:enhanced-eval-when_tests
(:use #:cl #:parachute)
(:shadowing-import-from #:enhanced-eval-when #:eval-when))
(cl:in-package #:enhanced-eval-when_tests)
(defmacro test-passthrough (situations)
`(is equal
(macroexpand-1 '(eval-when ,situations do-it))
'(cl:eval-when ,situations do-it)))
(define-test "enhanced-eval-when"
(is equal
(macroexpand-1 '(eval-when t do-it))
'(cl:eval-when (:compile-toplevel :load-toplevel :execute)
do-it))
(test-passthrough '())
(test-passthrough '(:compile-toplevel))
(test-passthrough '(:load-toplevel))
(test-passthrough '(:execute))
(test-passthrough '(:compile-toplevel :load-toplevel))
(test-passthrough '(:load-toplevel :execute))
(test-passthrough '(:compile-toplevel :execute)))
## Instruction:
Tests: Use proper argument order for IS.
## Code After:
(cl:defpackage #:enhanced-eval-when_tests
(:use #:cl #:parachute)
(:shadowing-import-from #:enhanced-eval-when #:eval-when))
(cl:in-package #:enhanced-eval-when_tests)
(defmacro test-passthrough (situations)
`(is equal '(cl:eval-when ,situations do-it)
(macroexpand-1 '(eval-when ,situations do-it))))
(define-test "enhanced-eval-when"
(is equal
(macroexpand-1 '(eval-when t do-it))
'(cl:eval-when (:compile-toplevel :load-toplevel :execute)
do-it))
(test-passthrough '())
(test-passthrough '(:compile-toplevel))
(test-passthrough '(:load-toplevel))
(test-passthrough '(:execute))
(test-passthrough '(:compile-toplevel :load-toplevel))
(test-passthrough '(:load-toplevel :execute))
(test-passthrough '(:compile-toplevel :execute)))
|
5ea108e372d706858bc34f43844c3e50170d9229 | extensions/hibernate-search-orm-elasticsearch/deployment/src/main/resources/dev-templates/entity-types.html | extensions/hibernate-search-orm-elasticsearch/deployment/src/main/resources/dev-templates/entity-types.html | {#include main}
{#title}Index Entities{/title}
{#body}
<form method="post" enctype="application/x-www-form-urlencoded">
<input id="index" type="submit" class="btn btn-primary mb-2" value="Reindex Entities" >
<table id="table" class="table table-striped">
<thead class="thead-dark">
<tr>
<th scope="col">
<div class="custom-control">
<input type="checkbox" class="form-check-input" id="check-all">
</div>
</th>
<th scope="col">Entity type</th>
</tr>
</thead>
<tbody>
{#for entityType in info:entityTypes}
<tr>
<td>
<div class="custom-control">
<input type="checkbox" class="form-check-input checkbox" name="{entityType}" id="{entityType}">
</div>
</td>
<td>{entityType}</td>
</tr>
{/for}
</tbody>
</table>
</form>
<script type="text/javascript">
jQuery('#check-all').change(function() {
if (this.checked) {
jQuery('.checkbox').prop('checked', true);
} else {
jQuery('.checkbox').prop('checked', false);
}
});
</script>
{/body}
{/include}
| {#include main}
{#title}Index Entities{/title}
{#body}
<form method="post" enctype="application/x-www-form-urlencoded">
<input id="index" type="submit" class="btn btn-primary mb-2" value="Reindex Entities" >
<table id="table" class="table table-striped">
<colgroup>
<col span="1" style="width: 5%;">
<col span="1" style="width: 95%;">
</colgroup>
<thead class="thead-dark">
<tr>
<th scope="col">
<div class="custom-control">
<input type="checkbox" class="form-check-input" id="check-all">
</div>
</th>
<th scope="col">Entity type</th>
</tr>
</thead>
<tbody>
{#for entityType in info:entityTypes}
<tr>
<td>
<div class="custom-control">
<input type="checkbox" class="form-check-input checkbox" name="{entityType}" id="{entityType}">
</div>
</td>
<td>{entityType}</td>
</tr>
{/for}
</tbody>
</table>
</form>
<script type="text/javascript">
jQuery('#check-all').change(function() {
if (this.checked) {
jQuery('.checkbox').prop('checked', true);
} else {
jQuery('.checkbox').prop('checked', false);
}
});
</script>
{/body}
{/include}
| Tweak column size for Hibernate Search Dev UI | Tweak column size for Hibernate Search Dev UI
| HTML | apache-2.0 | quarkusio/quarkus,quarkusio/quarkus,quarkusio/quarkus,quarkusio/quarkus,quarkusio/quarkus | html | ## Code Before:
{#include main}
{#title}Index Entities{/title}
{#body}
<form method="post" enctype="application/x-www-form-urlencoded">
<input id="index" type="submit" class="btn btn-primary mb-2" value="Reindex Entities" >
<table id="table" class="table table-striped">
<thead class="thead-dark">
<tr>
<th scope="col">
<div class="custom-control">
<input type="checkbox" class="form-check-input" id="check-all">
</div>
</th>
<th scope="col">Entity type</th>
</tr>
</thead>
<tbody>
{#for entityType in info:entityTypes}
<tr>
<td>
<div class="custom-control">
<input type="checkbox" class="form-check-input checkbox" name="{entityType}" id="{entityType}">
</div>
</td>
<td>{entityType}</td>
</tr>
{/for}
</tbody>
</table>
</form>
<script type="text/javascript">
jQuery('#check-all').change(function() {
if (this.checked) {
jQuery('.checkbox').prop('checked', true);
} else {
jQuery('.checkbox').prop('checked', false);
}
});
</script>
{/body}
{/include}
## Instruction:
Tweak column size for Hibernate Search Dev UI
## Code After:
{#include main}
{#title}Index Entities{/title}
{#body}
<form method="post" enctype="application/x-www-form-urlencoded">
<input id="index" type="submit" class="btn btn-primary mb-2" value="Reindex Entities" >
<table id="table" class="table table-striped">
<colgroup>
<col span="1" style="width: 5%;">
<col span="1" style="width: 95%;">
</colgroup>
<thead class="thead-dark">
<tr>
<th scope="col">
<div class="custom-control">
<input type="checkbox" class="form-check-input" id="check-all">
</div>
</th>
<th scope="col">Entity type</th>
</tr>
</thead>
<tbody>
{#for entityType in info:entityTypes}
<tr>
<td>
<div class="custom-control">
<input type="checkbox" class="form-check-input checkbox" name="{entityType}" id="{entityType}">
</div>
</td>
<td>{entityType}</td>
</tr>
{/for}
</tbody>
</table>
</form>
<script type="text/javascript">
jQuery('#check-all').change(function() {
if (this.checked) {
jQuery('.checkbox').prop('checked', true);
} else {
jQuery('.checkbox').prop('checked', false);
}
});
</script>
{/body}
{/include}
|
11c87876af79a586905a52b6602af0a9fba1e308 | .travis.yml | .travis.yml | language: ruby
rvm:
- 2.0.0
before_script:
- cp config/database.travis.yml config/database.yml
- bundle exec rake db:setup
| language: ruby
rvm:
- 2.0.0
before_script:
- cp config/secrets.yml.example config/secrets.yml
- cp config/database.travis.yml config/database.yml
- bundle exec rake db:setup
| Use example secrets so that devise doesn't throw exception on tests | Use example secrets so that devise doesn't throw exception on tests
| YAML | agpl-3.0 | mysociety/publicwhip,mysociety/publicwhip,mysociety/publicwhip | yaml | ## Code Before:
language: ruby
rvm:
- 2.0.0
before_script:
- cp config/database.travis.yml config/database.yml
- bundle exec rake db:setup
## Instruction:
Use example secrets so that devise doesn't throw exception on tests
## Code After:
language: ruby
rvm:
- 2.0.0
before_script:
- cp config/secrets.yml.example config/secrets.yml
- cp config/database.travis.yml config/database.yml
- bundle exec rake db:setup
|
a1aea68f3a37d2a14e814c627a82ba55a2c11a74 | lib/asciidoctor-diagram/util/cli.rb | lib/asciidoctor-diagram/util/cli.rb | require 'tempfile'
require 'open3'
module Asciidoctor
module Diagram
# @private
module Cli
def self.run(*args)
stdout, stderr, status = Open3.capture3(*args)
if status != 0
raise "#{File.basename(args[0])} failed: #{stdout.empty? ? stderr : stdout}"
end
{
:out => stdout,
:err => stderr,
:status => status
}
end
end
end
end
| require 'tempfile'
require 'open3'
module Asciidoctor
module Diagram
# @private
module Cli
def self.run(*args)
stdout, stderr, status = Open3.capture3(*args)
if status.exitstatus != 0
raise "#{File.basename(args[0])} failed: #{stdout.empty? ? stderr : stdout}"
end
{
:out => stdout,
:err => stderr,
:status => status
}
end
end
end
end
| Correct process exit code check | Correct process exit code check
| Ruby | mit | asciidoctor/asciidoctor-diagram | ruby | ## Code Before:
require 'tempfile'
require 'open3'
module Asciidoctor
module Diagram
# @private
module Cli
def self.run(*args)
stdout, stderr, status = Open3.capture3(*args)
if status != 0
raise "#{File.basename(args[0])} failed: #{stdout.empty? ? stderr : stdout}"
end
{
:out => stdout,
:err => stderr,
:status => status
}
end
end
end
end
## Instruction:
Correct process exit code check
## Code After:
require 'tempfile'
require 'open3'
module Asciidoctor
module Diagram
# @private
module Cli
def self.run(*args)
stdout, stderr, status = Open3.capture3(*args)
if status.exitstatus != 0
raise "#{File.basename(args[0])} failed: #{stdout.empty? ? stderr : stdout}"
end
{
:out => stdout,
:err => stderr,
:status => status
}
end
end
end
end
|
ab96487072c6f6ac62ab07771891d1dd9b77d3ec | app/presenters/courses_presenter.rb | app/presenters/courses_presenter.rb | require "#{Rails.root}/lib/word_count"
#= Presenter for courses / cohort view
class CoursesPresenter
attr_reader :current_user, :cohort_param
def initialize(current_user, cohort_param)
@current_user = current_user
@cohort_param = cohort_param
end
def admin_courses
return unless current_user && current_user.admin?
Course.submitted_listed
end
def user_courses
return unless current_user
current_user.courses.current_and_future.listed
end
def cohort
return NullCohort.new if cohort_param == 'none'
return unless Cohort.exists?(slug: cohort_param)
Cohort.find_by(slug: cohort_param)
end
def courses
cohort.courses.listed
end
def courses_by_recent_edits
courses.sort_by(&:recent_edit_count).reverse
end
def word_count
WordCount.from_characters courses.sum(:character_sum)
end
end
#= Pseudo-Cohort that displays all unsubmitted, non-deleted courses
class NullCohort
def title
I18n.t("courses.unsubmitted")
end
def slug
'none'
end
def courses
Course.unsubmitted_listed.order(created_at: :desc)
end
def students_without_nonstudents
[]
end
def trained_percent
0
end
end
| require "#{Rails.root}/lib/word_count"
#= Presenter for courses / cohort view
class CoursesPresenter
attr_reader :current_user, :cohort_param
def initialize(current_user, cohort_param)
@current_user = current_user
@cohort_param = cohort_param
end
def admin_courses
return unless current_user && current_user.admin?
Course.submitted_listed
end
def user_courses
return unless current_user
current_user.courses.current_and_future.listed
end
def cohort
return NullCohort.new if cohort_param == 'none'
return unless Cohort.exists?(slug: cohort_param)
Cohort.find_by(slug: cohort_param)
end
def courses
cohort.courses.listed
end
def courses_by_recent_edits
# Sort first by recent edit count, and then by course title
courses.sort_by { |course| [-course.recent_edit_count, course.title] }
end
def word_count
WordCount.from_characters courses.sum(:character_sum)
end
end
#= Pseudo-Cohort that displays all unsubmitted, non-deleted courses
class NullCohort
def title
I18n.t("courses.unsubmitted")
end
def slug
'none'
end
def courses
Course.unsubmitted_listed.order(created_at: :desc)
end
def students_without_nonstudents
[]
end
def trained_percent
0
end
end
| Sort courses secondarily by title | Sort courses secondarily by title
| Ruby | mit | WikiEducationFoundation/WikiEduDashboard,feelfreelinux/WikiEduDashboard,majakomel/WikiEduDashboard,WikiEducationFoundation/WikiEduDashboard,sejalkhatri/WikiEduDashboard,KarmaHater/WikiEduDashboard,KarmaHater/WikiEduDashboard,Wowu/WikiEduDashboard,sejalkhatri/WikiEduDashboard,alpha721/WikiEduDashboard,alpha721/WikiEduDashboard,Wowu/WikiEduDashboard,feelfreelinux/WikiEduDashboard,MusikAnimal/WikiEduDashboard,MusikAnimal/WikiEduDashboard,KarmaHater/WikiEduDashboard,majakomel/WikiEduDashboard,KarmaHater/WikiEduDashboard,Wowu/WikiEduDashboard,WikiEducationFoundation/WikiEduDashboard,sejalkhatri/WikiEduDashboard,WikiEducationFoundation/WikiEduDashboard,Wowu/WikiEduDashboard,MusikAnimal/WikiEduDashboard,MusikAnimal/WikiEduDashboard,sejalkhatri/WikiEduDashboard,alpha721/WikiEduDashboard,majakomel/WikiEduDashboard,feelfreelinux/WikiEduDashboard,feelfreelinux/WikiEduDashboard,WikiEducationFoundation/WikiEduDashboard,sejalkhatri/WikiEduDashboard,majakomel/WikiEduDashboard,alpha721/WikiEduDashboard | ruby | ## Code Before:
require "#{Rails.root}/lib/word_count"
#= Presenter for courses / cohort view
class CoursesPresenter
attr_reader :current_user, :cohort_param
def initialize(current_user, cohort_param)
@current_user = current_user
@cohort_param = cohort_param
end
def admin_courses
return unless current_user && current_user.admin?
Course.submitted_listed
end
def user_courses
return unless current_user
current_user.courses.current_and_future.listed
end
def cohort
return NullCohort.new if cohort_param == 'none'
return unless Cohort.exists?(slug: cohort_param)
Cohort.find_by(slug: cohort_param)
end
def courses
cohort.courses.listed
end
def courses_by_recent_edits
courses.sort_by(&:recent_edit_count).reverse
end
def word_count
WordCount.from_characters courses.sum(:character_sum)
end
end
#= Pseudo-Cohort that displays all unsubmitted, non-deleted courses
class NullCohort
def title
I18n.t("courses.unsubmitted")
end
def slug
'none'
end
def courses
Course.unsubmitted_listed.order(created_at: :desc)
end
def students_without_nonstudents
[]
end
def trained_percent
0
end
end
## Instruction:
Sort courses secondarily by title
## Code After:
require "#{Rails.root}/lib/word_count"
#= Presenter for courses / cohort view
class CoursesPresenter
attr_reader :current_user, :cohort_param
def initialize(current_user, cohort_param)
@current_user = current_user
@cohort_param = cohort_param
end
def admin_courses
return unless current_user && current_user.admin?
Course.submitted_listed
end
def user_courses
return unless current_user
current_user.courses.current_and_future.listed
end
def cohort
return NullCohort.new if cohort_param == 'none'
return unless Cohort.exists?(slug: cohort_param)
Cohort.find_by(slug: cohort_param)
end
def courses
cohort.courses.listed
end
def courses_by_recent_edits
# Sort first by recent edit count, and then by course title
courses.sort_by { |course| [-course.recent_edit_count, course.title] }
end
def word_count
WordCount.from_characters courses.sum(:character_sum)
end
end
#= Pseudo-Cohort that displays all unsubmitted, non-deleted courses
class NullCohort
def title
I18n.t("courses.unsubmitted")
end
def slug
'none'
end
def courses
Course.unsubmitted_listed.order(created_at: :desc)
end
def students_without_nonstudents
[]
end
def trained_percent
0
end
end
|
cfcf5f25f11237750e9ffa2cb42534d168b48b41 | custom/01-pdf-tools.el | custom/01-pdf-tools.el | ;; pdf-tools for viewing PDFs
(use-package pdf-tools
:init
(pdf-tools-install)
;; Setup a hook when we kill a pdf take a bookmark
(add-hook 'kill-buffer-hook 'kill-buffer-hook-setup)
:defer 5
:config
;; Set a bookmark on kill
(defun kill-buffer-hook-setup ()
;; Test that we have a filename and file extensions and it's a pdf and the user
;; wants to take a bookmark
(if (and buffer-file-name
(file-name-extension buffer-file-name)
(string= (downcase (file-name-extension buffer-file-name)) "pdf")
(y-or-n-p "Set bookmark with current file name?"))
;; Set a bookmark with the name being the buffers full path name
(bookmark-set (file-name-nondirectory buffer-file-name) nil))))
| ;; pdf-tools for viewing PDFs
(use-package pdf-tools
:init
(pdf-tools-install)
;; Setup a hook when we kill a pdf take a bookmark
(add-hook 'kill-buffer-hook 'kill-buffer-hook-setup)
:config
;; Set a bookmark on kill
(defun kill-buffer-hook-setup ()
;; Test that we have a filename and file extensions and it's a pdf and the user
;; wants to take a bookmark
(if (and buffer-file-name
(file-name-extension buffer-file-name)
(string= (downcase (file-name-extension buffer-file-name)) "pdf")
(y-or-n-p "Set bookmark with current file name?"))
;; Set a bookmark with the name being the buffers full path name
(bookmark-set (file-name-nondirectory buffer-file-name) nil))))
| Remove defer for pdf tools | Remove defer for pdf tools
| Emacs Lisp | mit | map7/emacs-config,map7/emacs-config,map7/emacs-config | emacs-lisp | ## Code Before:
;; pdf-tools for viewing PDFs
(use-package pdf-tools
:init
(pdf-tools-install)
;; Setup a hook when we kill a pdf take a bookmark
(add-hook 'kill-buffer-hook 'kill-buffer-hook-setup)
:defer 5
:config
;; Set a bookmark on kill
(defun kill-buffer-hook-setup ()
;; Test that we have a filename and file extensions and it's a pdf and the user
;; wants to take a bookmark
(if (and buffer-file-name
(file-name-extension buffer-file-name)
(string= (downcase (file-name-extension buffer-file-name)) "pdf")
(y-or-n-p "Set bookmark with current file name?"))
;; Set a bookmark with the name being the buffers full path name
(bookmark-set (file-name-nondirectory buffer-file-name) nil))))
## Instruction:
Remove defer for pdf tools
## Code After:
;; pdf-tools for viewing PDFs
(use-package pdf-tools
:init
(pdf-tools-install)
;; Setup a hook when we kill a pdf take a bookmark
(add-hook 'kill-buffer-hook 'kill-buffer-hook-setup)
:config
;; Set a bookmark on kill
(defun kill-buffer-hook-setup ()
;; Test that we have a filename and file extensions and it's a pdf and the user
;; wants to take a bookmark
(if (and buffer-file-name
(file-name-extension buffer-file-name)
(string= (downcase (file-name-extension buffer-file-name)) "pdf")
(y-or-n-p "Set bookmark with current file name?"))
;; Set a bookmark with the name being the buffers full path name
(bookmark-set (file-name-nondirectory buffer-file-name) nil))))
|
bb71e1385ee495533887f5af4f7e737de0cfebc2 | metadata/com.michaldabski.filemanager.txt | metadata/com.michaldabski.filemanager.txt | Categories:System
License:MIT
Web Site:
Source Code:https://github.com/mick88/filemanager
Issue Tracker:https://github.com/mick88/filemanager/issues
Auto Name:File Manager Pro
Summary:File manager
Description:
A file manager.
.
Repo Type:git
Repo:https://github.com/mick88/filemanager.git
Build:0.3,5
commit=c1b37448572a1e5244fd89abe1eeec9956c11560
srclibs=1:MSQLite@4f4a7ce66332e3576a1ef993141d01dc9af2fe64,2:SystemBarTint@v1.0.3
rm=libs/msqlite.jar,libs/SystemBarTint.jar,libs/android-support-v4.jar
extlibs=android/android-support-v4.jar
target=android-19
Auto Update Mode:None
Update Check Mode:RepoManifest
Current Version:0.4
Current Version Code:6
| Categories:System
License:MIT
Web Site:
Source Code:https://github.com/mick88/filemanager
Issue Tracker:https://github.com/mick88/filemanager/issues
Auto Name:File Manager Pro
Summary:File manager
Description:
A file manager.
.
Repo Type:git
Repo:https://github.com/mick88/filemanager.git
Build:0.3,5
commit=c1b37448572a1e5244fd89abe1eeec9956c11560
srclibs=1:MSQLite@4f4a7ce66332e3576a1ef993141d01dc9af2fe64,2:SystemBarTint@v1.0.3
rm=libs/msqlite.jar,libs/SystemBarTint.jar,libs/android-support-v4.jar
extlibs=android/android-support-v4.jar
target=android-19
Build:0.4,6
commit=0.4
srclibs=1:MSQLite@4f4a7ce66332e3576a1ef993141d01dc9af2fe64,2:SystemBarTint@v1.0.3
rm=libs/msqlite.jar,libs/SystemBarTint.jar,libs/android-support-v4.jar
extlibs=android/android-support-v4.jar
target=android-19
Auto Update Mode:None
Update Check Mode:Tags
Current Version:0.4
Current Version Code:6
| Update File Manager Pro to 0.4 (6) | Update File Manager Pro to 0.4 (6)
| Text | agpl-3.0 | f-droid/fdroiddata,f-droid/fdroiddata,f-droid/fdroid-data | text | ## Code Before:
Categories:System
License:MIT
Web Site:
Source Code:https://github.com/mick88/filemanager
Issue Tracker:https://github.com/mick88/filemanager/issues
Auto Name:File Manager Pro
Summary:File manager
Description:
A file manager.
.
Repo Type:git
Repo:https://github.com/mick88/filemanager.git
Build:0.3,5
commit=c1b37448572a1e5244fd89abe1eeec9956c11560
srclibs=1:MSQLite@4f4a7ce66332e3576a1ef993141d01dc9af2fe64,2:SystemBarTint@v1.0.3
rm=libs/msqlite.jar,libs/SystemBarTint.jar,libs/android-support-v4.jar
extlibs=android/android-support-v4.jar
target=android-19
Auto Update Mode:None
Update Check Mode:RepoManifest
Current Version:0.4
Current Version Code:6
## Instruction:
Update File Manager Pro to 0.4 (6)
## Code After:
Categories:System
License:MIT
Web Site:
Source Code:https://github.com/mick88/filemanager
Issue Tracker:https://github.com/mick88/filemanager/issues
Auto Name:File Manager Pro
Summary:File manager
Description:
A file manager.
.
Repo Type:git
Repo:https://github.com/mick88/filemanager.git
Build:0.3,5
commit=c1b37448572a1e5244fd89abe1eeec9956c11560
srclibs=1:MSQLite@4f4a7ce66332e3576a1ef993141d01dc9af2fe64,2:SystemBarTint@v1.0.3
rm=libs/msqlite.jar,libs/SystemBarTint.jar,libs/android-support-v4.jar
extlibs=android/android-support-v4.jar
target=android-19
Build:0.4,6
commit=0.4
srclibs=1:MSQLite@4f4a7ce66332e3576a1ef993141d01dc9af2fe64,2:SystemBarTint@v1.0.3
rm=libs/msqlite.jar,libs/SystemBarTint.jar,libs/android-support-v4.jar
extlibs=android/android-support-v4.jar
target=android-19
Auto Update Mode:None
Update Check Mode:Tags
Current Version:0.4
Current Version Code:6
|
7608a6edc9179d915b541076a1c2d909e688dbf4 | examples/room/Cargo.toml | examples/room/Cargo.toml | [package]
name = "room_example"
version = "0.1.0"
authors = ["Imanol Fernandez <mortimergoro@gmail.com>"]
build = "build.rs"
[dependencies]
glutin = "0.7.4"
gleam = "0.4"
cgmath = "0.12"
image = "0.12"
android_glue = "0.2"
[target.'cfg(target_os = "android")'.dependencies]
android_injected_glue = {git = "https://github.com/mmatyas/android-rs-injected-glue"}
[dependencies.rust-webvr]
path = "../.."
| [package]
name = "room_example"
version = "0.1.0"
authors = ["Imanol Fernandez <mortimergoro@gmail.com>"]
build = "build.rs"
[dependencies]
glutin = { git = "https://github.com/MortimerGoro/glutin/", branch = "android_fixes" }
gleam = "0.4"
cgmath = "0.12"
image = "0.12"
android_glue = "0.2"
[target.'cfg(target_os = "android")'.dependencies]
android_injected_glue = {git = "https://github.com/mmatyas/android-rs-injected-glue"}
[dependencies.rust-webvr]
path = "../.."
| Use a custom fork of glutin for a correct Android life cycle | Use a custom fork of glutin for a correct Android life cycle
| TOML | mit | MortimerGoro/rust-webvr,MortimerGoro/rust-webvr,MortimerGoro/rust-webvr | toml | ## Code Before:
[package]
name = "room_example"
version = "0.1.0"
authors = ["Imanol Fernandez <mortimergoro@gmail.com>"]
build = "build.rs"
[dependencies]
glutin = "0.7.4"
gleam = "0.4"
cgmath = "0.12"
image = "0.12"
android_glue = "0.2"
[target.'cfg(target_os = "android")'.dependencies]
android_injected_glue = {git = "https://github.com/mmatyas/android-rs-injected-glue"}
[dependencies.rust-webvr]
path = "../.."
## Instruction:
Use a custom fork of glutin for a correct Android life cycle
## Code After:
[package]
name = "room_example"
version = "0.1.0"
authors = ["Imanol Fernandez <mortimergoro@gmail.com>"]
build = "build.rs"
[dependencies]
glutin = { git = "https://github.com/MortimerGoro/glutin/", branch = "android_fixes" }
gleam = "0.4"
cgmath = "0.12"
image = "0.12"
android_glue = "0.2"
[target.'cfg(target_os = "android")'.dependencies]
android_injected_glue = {git = "https://github.com/mmatyas/android-rs-injected-glue"}
[dependencies.rust-webvr]
path = "../.."
|
93bcf4b494111bb7d857363419103600e956bd85 | modules/warning/script.js | modules/warning/script.js | $(function() {
var $warning = $('#warning > ul');
// Configure the scrolling area
var nbItems = $warning.find('li').length;
var parentWidth = $warning.parent().css('width');
parentWidth = parentWidth.substr(0, parentWidth.indexOf('px'));
$warning.css('width', (nbItems * 100) + '%');
// Shows the element and starts horizontal scrolling
var scrollOnce = function() {
$warning.css('margin-left', parentWidth + 'px');
$warning.css('display', 'block');
$warning.animate({marginLeft: '-' + (nbItems * parentWidth) + 'px'}, nbItems * 10000, function() {
$warning.css('display', 'none');
setTimeout(scrollOnce, 0);
});
};
scrollOnce();
}); | $(function() {
var $warning = $('#warning > ul');
// Configure the scrolling area
var nbItems = $warning.find('li').length;
var parentWidth = $warning.parent().css('width');
parentWidth = parentWidth.substr(0, parentWidth.indexOf('px'));
$warning.css('width', (nbItems * 100) + '%');
// Shows the element and starts horizontal scrolling
var scrollOnce = function() {
$warning.css('margin-left', parentWidth + 'px');
$warning.css('display', 'block');
$warning.animate({marginLeft: '-' + (nbItems * parentWidth) + 'px'}, nbItems * 10000, 'linear', function() {
$warning.css('display', 'none');
setTimeout(scrollOnce, 0);
});
};
scrollOnce();
}); | Change scroll mode to linear for warning module | Modules: Change scroll mode to linear for warning module
| JavaScript | agpl-3.0 | ECAM-Brussels/ECAMTV,ECAM-Brussels/ECAMTV,ECAM-Brussels/ECAMTV | javascript | ## Code Before:
$(function() {
var $warning = $('#warning > ul');
// Configure the scrolling area
var nbItems = $warning.find('li').length;
var parentWidth = $warning.parent().css('width');
parentWidth = parentWidth.substr(0, parentWidth.indexOf('px'));
$warning.css('width', (nbItems * 100) + '%');
// Shows the element and starts horizontal scrolling
var scrollOnce = function() {
$warning.css('margin-left', parentWidth + 'px');
$warning.css('display', 'block');
$warning.animate({marginLeft: '-' + (nbItems * parentWidth) + 'px'}, nbItems * 10000, function() {
$warning.css('display', 'none');
setTimeout(scrollOnce, 0);
});
};
scrollOnce();
});
## Instruction:
Modules: Change scroll mode to linear for warning module
## Code After:
$(function() {
var $warning = $('#warning > ul');
// Configure the scrolling area
var nbItems = $warning.find('li').length;
var parentWidth = $warning.parent().css('width');
parentWidth = parentWidth.substr(0, parentWidth.indexOf('px'));
$warning.css('width', (nbItems * 100) + '%');
// Shows the element and starts horizontal scrolling
var scrollOnce = function() {
$warning.css('margin-left', parentWidth + 'px');
$warning.css('display', 'block');
$warning.animate({marginLeft: '-' + (nbItems * parentWidth) + 'px'}, nbItems * 10000, 'linear', function() {
$warning.css('display', 'none');
setTimeout(scrollOnce, 0);
});
};
scrollOnce();
}); |
c40c8d873a3c929da7fb4d1622f53dc1350bc053 | gradle.properties | gradle.properties | VERSION_NAME=1.0.0
GROUP=com.chrishorner
POM_DESCRIPTION=An overlay of lines to verify your UI elements adhere to specified intervals.
POM_URL=https://github.com/chris-horner/RhythmSticks/
POM_SCM_URL=https://github.com/chris-horner/RhythmSticks/
POM_SCM_CONNECTION=scm:git@github.com:chris-horner/RhythmSticks.git
POM_SCM_DEV_CONNECTION=scm:git@github.com:chris-horner/RhythmSticks.git
POM_LICENCE_NAME=The Apache Software License, Version 2.0
POM_LICENCE_URL=http://www.apache.org/licenses/LICENSE-2.0.txt
POM_LICENCE_DIST=repo
POM_DEVELOPER_ID=chris-horner
POM_DEVELOPER_NAME=Chris Horner | VERSION_NAME=1.0.1-SNAPSHOT
GROUP=com.chrishorner
POM_DESCRIPTION=An overlay of lines to verify your UI elements adhere to specified intervals.
POM_URL=https://github.com/chris-horner/RhythmSticks/
POM_SCM_URL=https://github.com/chris-horner/RhythmSticks/
POM_SCM_CONNECTION=scm:git@github.com:chris-horner/RhythmSticks.git
POM_SCM_DEV_CONNECTION=scm:git@github.com:chris-horner/RhythmSticks.git
POM_LICENCE_NAME=The Apache Software License, Version 2.0
POM_LICENCE_URL=http://www.apache.org/licenses/LICENSE-2.0.txt
POM_LICENCE_DIST=repo
POM_DEVELOPER_ID=chris-horner
POM_DEVELOPER_NAME=Chris Horner | Prepare for next development iteration | Prepare for next development iteration
| INI | apache-2.0 | chris-horner/RhythmSticks,0359xiaodong/RhythmSticks | ini | ## Code Before:
VERSION_NAME=1.0.0
GROUP=com.chrishorner
POM_DESCRIPTION=An overlay of lines to verify your UI elements adhere to specified intervals.
POM_URL=https://github.com/chris-horner/RhythmSticks/
POM_SCM_URL=https://github.com/chris-horner/RhythmSticks/
POM_SCM_CONNECTION=scm:git@github.com:chris-horner/RhythmSticks.git
POM_SCM_DEV_CONNECTION=scm:git@github.com:chris-horner/RhythmSticks.git
POM_LICENCE_NAME=The Apache Software License, Version 2.0
POM_LICENCE_URL=http://www.apache.org/licenses/LICENSE-2.0.txt
POM_LICENCE_DIST=repo
POM_DEVELOPER_ID=chris-horner
POM_DEVELOPER_NAME=Chris Horner
## Instruction:
Prepare for next development iteration
## Code After:
VERSION_NAME=1.0.1-SNAPSHOT
GROUP=com.chrishorner
POM_DESCRIPTION=An overlay of lines to verify your UI elements adhere to specified intervals.
POM_URL=https://github.com/chris-horner/RhythmSticks/
POM_SCM_URL=https://github.com/chris-horner/RhythmSticks/
POM_SCM_CONNECTION=scm:git@github.com:chris-horner/RhythmSticks.git
POM_SCM_DEV_CONNECTION=scm:git@github.com:chris-horner/RhythmSticks.git
POM_LICENCE_NAME=The Apache Software License, Version 2.0
POM_LICENCE_URL=http://www.apache.org/licenses/LICENSE-2.0.txt
POM_LICENCE_DIST=repo
POM_DEVELOPER_ID=chris-horner
POM_DEVELOPER_NAME=Chris Horner |
cb0eb26634b31bad7468f516a80fdac8ef10d276 | app/views/admin/shared/_topbar.html.erb | app/views/admin/shared/_topbar.html.erb | <nav class="topbar">
<div class="logo">
<%= link_to root_path do %>
<%= image_tag 'logo.png' %>
<% end %>
</div>
<div class="button-container">
<ul>
<li><%= link_to "Sign Out", destroy_admin_session_path, method: :delete %></li>
<li><%= link_to "Admin Home", admin_path %></li>
</ul>
</div>
</nav>
| <nav class="topbar">
<div class="logo">
<%= link_to root_path do %>
<%= image_tag 'logo.png' %>
<% end %>
</div>
<div class="button-container">
<ul>
<li><%= link_to "Sign Out", destroy_admin_session_path, method: :delete %></li>
<li><%= link_to "Admin Home", admin_path %></li>
<li><%= link_to "Products", admin_products_path%></li>
</ul>
</div>
</nav>
| Add products link to admin tobbar | Add products link to admin tobbar
Added a link to the admin topbar so you can navigate easilly to the
admin products page
| HTML+ERB | mit | gjh33/SimpleDream,gjh33/SimpleDream,gjh33/SimpleDream | html+erb | ## Code Before:
<nav class="topbar">
<div class="logo">
<%= link_to root_path do %>
<%= image_tag 'logo.png' %>
<% end %>
</div>
<div class="button-container">
<ul>
<li><%= link_to "Sign Out", destroy_admin_session_path, method: :delete %></li>
<li><%= link_to "Admin Home", admin_path %></li>
</ul>
</div>
</nav>
## Instruction:
Add products link to admin tobbar
Added a link to the admin topbar so you can navigate easilly to the
admin products page
## Code After:
<nav class="topbar">
<div class="logo">
<%= link_to root_path do %>
<%= image_tag 'logo.png' %>
<% end %>
</div>
<div class="button-container">
<ul>
<li><%= link_to "Sign Out", destroy_admin_session_path, method: :delete %></li>
<li><%= link_to "Admin Home", admin_path %></li>
<li><%= link_to "Products", admin_products_path%></li>
</ul>
</div>
</nav>
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.