commit stringlengths 40 40 | old_file stringlengths 4 184 | new_file stringlengths 4 184 | old_contents stringlengths 1 3.6k | new_contents stringlengths 5 3.38k | subject stringlengths 15 778 | message stringlengths 16 6.74k | lang stringclasses 201 values | license stringclasses 13 values | repos stringlengths 6 116k | config stringclasses 201 values | content stringlengths 137 7.24k | diff stringlengths 26 5.55k | diff_length int64 1 123 | relative_diff_length float64 0.01 89 | n_lines_added int64 0 108 | n_lines_deleted int64 0 106 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
393b7892c37e43cd9f808b7ca440d0d88d6182c1 | src/templates/components/mini-calendar.twig | src/templates/components/mini-calendar.twig | <div class="flex items-baseline nl3 nr3 ml0-m mr0-m pa3 pb0 bg-yellow">
<div class="flex-auto nb3">
{% include "components/title.twig" with { "content": "Events", "color": "white" } %}
</div>
<a class="lh-title white hover-white underline" href="{{ site.link }}/events">View All</a>
</div>
<div id="fullcalendar" class="mini"></div>
| <div class="flex items-baseline nl3 nr3 ml0-m mr0-m pa3 pb0 bg-yellow">
<div class="flex-auto nb3">
{% include "components/title.twig" with {
"content": now|date("F") ~ " Events",
"color": "white"
} %}
</div>
<a class="lh-title white hover-white underline" href="{{ site.link }}/events">More</a>
</div>
<div id="fullcalendar" class="mini"></div>
| Add month name to mini calendar | Add month name to mini calendar
| Twig | isc | work-shop/medmates,work-shop/medmates,work-shop/medmates,work-shop/medmates | twig | ## Code Before:
<div class="flex items-baseline nl3 nr3 ml0-m mr0-m pa3 pb0 bg-yellow">
<div class="flex-auto nb3">
{% include "components/title.twig" with { "content": "Events", "color": "white" } %}
</div>
<a class="lh-title white hover-white underline" href="{{ site.link }}/events">View All</a>
</div>
<div id="fullcalendar" class="mini"></div>
## Instruction:
Add month name to mini calendar
## Code After:
<div class="flex items-baseline nl3 nr3 ml0-m mr0-m pa3 pb0 bg-yellow">
<div class="flex-auto nb3">
{% include "components/title.twig" with {
"content": now|date("F") ~ " Events",
"color": "white"
} %}
</div>
<a class="lh-title white hover-white underline" href="{{ site.link }}/events">More</a>
</div>
<div id="fullcalendar" class="mini"></div>
| <div class="flex items-baseline nl3 nr3 ml0-m mr0-m pa3 pb0 bg-yellow">
<div class="flex-auto nb3">
- {% include "components/title.twig" with { "content": "Events", "color": "white" } %}
+ {% include "components/title.twig" with {
+ "content": now|date("F") ~ " Events",
+ "color": "white"
+ } %}
</div>
- <a class="lh-title white hover-white underline" href="{{ site.link }}/events">View All</a>
? ^^ -----
+ <a class="lh-title white hover-white underline" href="{{ site.link }}/events">More</a>
? ^^^
</div>
<div id="fullcalendar" class="mini"></div> | 7 | 0.777778 | 5 | 2 |
f48eb543c3ae2222a71080592ae8932c227dc605 | roche/scripts/xml-load.py | roche/scripts/xml-load.py |
import sys
sys.path.append('../../')
import roche.settings
from eulexistdb.db import ExistDB
from roche.settings import EXISTDB_SERVER_URL
#
# Timeout higher?
#
xmldb = ExistDB(timeout=30)
xmldb.createCollection('docker', True)
xmldb.createCollection(u'docker/浙江大學圖書館', True)
with open('../../../dublin-store/db/test_001.xml') as f:
xmldb.load(f, '/docker/001.xml', True)
|
import sys
sys.path.append('.')
import roche.settings
from eulexistdb.db import ExistDB
from roche.settings import EXISTDB_SERVER_URL
#
# Timeout higher?
#
xmldb = ExistDB(timeout=30)
xmldb.createCollection('docker', True)
xmldb.createCollection(u'docker/浙江大學圖書館', True)
with open('../dublin-store/db/test_001.xml') as f:
xmldb.load(f, '/docker/001.xml', True)
| Fix relative path in relation to app root dir | Fix relative path in relation to app root dir
| Python | mit | beijingren/roche-website,beijingren/roche-website,beijingren/roche-website,beijingren/roche-website | python | ## Code Before:
import sys
sys.path.append('../../')
import roche.settings
from eulexistdb.db import ExistDB
from roche.settings import EXISTDB_SERVER_URL
#
# Timeout higher?
#
xmldb = ExistDB(timeout=30)
xmldb.createCollection('docker', True)
xmldb.createCollection(u'docker/浙江大學圖書館', True)
with open('../../../dublin-store/db/test_001.xml') as f:
xmldb.load(f, '/docker/001.xml', True)
## Instruction:
Fix relative path in relation to app root dir
## Code After:
import sys
sys.path.append('.')
import roche.settings
from eulexistdb.db import ExistDB
from roche.settings import EXISTDB_SERVER_URL
#
# Timeout higher?
#
xmldb = ExistDB(timeout=30)
xmldb.createCollection('docker', True)
xmldb.createCollection(u'docker/浙江大學圖書館', True)
with open('../dublin-store/db/test_001.xml') as f:
xmldb.load(f, '/docker/001.xml', True)
|
import sys
- sys.path.append('../../')
? -----
+ sys.path.append('.')
import roche.settings
from eulexistdb.db import ExistDB
from roche.settings import EXISTDB_SERVER_URL
#
# Timeout higher?
#
xmldb = ExistDB(timeout=30)
xmldb.createCollection('docker', True)
xmldb.createCollection(u'docker/浙江大學圖書館', True)
- with open('../../../dublin-store/db/test_001.xml') as f:
? ------
+ with open('../dublin-store/db/test_001.xml') as f:
xmldb.load(f, '/docker/001.xml', True)
| 4 | 0.210526 | 2 | 2 |
f3583f65857ccb82d1c09881c3ba5a87df3e6f11 | .travis.yml | .travis.yml | dist: trusty
sudo: false
language: erlang
otp_release:
- 19.3.6.1
addons:
apt:
packages:
- r-base
before_install:
- ./ci before_install ${JOB:?} "${TRAVIS_BUILD_DIR:?}"
install:
- ./ci install ${JOB:?} "${TRAVIS_BUILD_DIR:?}"
before_script:
- ./ci before_script ${JOB:?} "${TRAVIS_BUILD_DIR:?}"
script:
- ./ci script ${JOB:?} "${TRAVIS_BUILD_DIR:?}"
matrix:
allow_failures:
fast_finish: true
env:
- JOB=minloadtest
- JOB=dialyzer
- JOB=xref
cache:
directories:
- .plt
| dist: trusty
sudo: required
language: erlang
otp_release:
- 19.3.6.1
before_install:
- sudo apt-get -qq update
- sudo apt-get install -y r-cran-plyr r-cran-ggplot2
- ./ci before_install ${JOB:?} "${TRAVIS_BUILD_DIR:?}"
install:
- ./ci install ${JOB:?} "${TRAVIS_BUILD_DIR:?}"
before_script:
- ./ci before_script ${JOB:?} "${TRAVIS_BUILD_DIR:?}"
script:
- ./ci script ${JOB:?} "${TRAVIS_BUILD_DIR:?}"
matrix:
allow_failures:
fast_finish: true
env:
- JOB=minloadtest
- JOB=dialyzer
- JOB=xref
cache:
directories:
- .plt
| Fix for graphs on CI: prefer R dependencies from Ubuntu repo because of old R | Fix for graphs on CI: prefer R dependencies from Ubuntu repo because of old R
Refs:
* https://docs.travis-ci.com/user/installing-dependencies/#Installing-Packages-on-Standard-Infrastructure
* https://docs.travis-ci.com/user/reference/trusty/#Routing-to-Trusty
Fixes error:
```
Rscript --vanilla priv/summary.r -i tests/current
[1] "plyr"
Loading required package: plyr
Installing package into ‘/home/travis/build/lucafavatella/basho_bench/tmp/R_libs’
(as ‘lib’ is unspecified)
Error in library(p, character.only = TRUE) :
there is no package called ‘plyr’
Calls: source -> withVisible -> eval -> eval -> library
In addition: Warning message:
package ‘plyr’ is not available (for R version 3.0.2)
Execution halted
make: *** [results] Error 1
```
| YAML | apache-2.0 | mrallen1/basho_bench,mrallen1/basho_bench,mrallen1/basho_bench,mrallen1/basho_bench | yaml | ## Code Before:
dist: trusty
sudo: false
language: erlang
otp_release:
- 19.3.6.1
addons:
apt:
packages:
- r-base
before_install:
- ./ci before_install ${JOB:?} "${TRAVIS_BUILD_DIR:?}"
install:
- ./ci install ${JOB:?} "${TRAVIS_BUILD_DIR:?}"
before_script:
- ./ci before_script ${JOB:?} "${TRAVIS_BUILD_DIR:?}"
script:
- ./ci script ${JOB:?} "${TRAVIS_BUILD_DIR:?}"
matrix:
allow_failures:
fast_finish: true
env:
- JOB=minloadtest
- JOB=dialyzer
- JOB=xref
cache:
directories:
- .plt
## Instruction:
Fix for graphs on CI: prefer R dependencies from Ubuntu repo because of old R
Refs:
* https://docs.travis-ci.com/user/installing-dependencies/#Installing-Packages-on-Standard-Infrastructure
* https://docs.travis-ci.com/user/reference/trusty/#Routing-to-Trusty
Fixes error:
```
Rscript --vanilla priv/summary.r -i tests/current
[1] "plyr"
Loading required package: plyr
Installing package into ‘/home/travis/build/lucafavatella/basho_bench/tmp/R_libs’
(as ‘lib’ is unspecified)
Error in library(p, character.only = TRUE) :
there is no package called ‘plyr’
Calls: source -> withVisible -> eval -> eval -> library
In addition: Warning message:
package ‘plyr’ is not available (for R version 3.0.2)
Execution halted
make: *** [results] Error 1
```
## Code After:
dist: trusty
sudo: required
language: erlang
otp_release:
- 19.3.6.1
before_install:
- sudo apt-get -qq update
- sudo apt-get install -y r-cran-plyr r-cran-ggplot2
- ./ci before_install ${JOB:?} "${TRAVIS_BUILD_DIR:?}"
install:
- ./ci install ${JOB:?} "${TRAVIS_BUILD_DIR:?}"
before_script:
- ./ci before_script ${JOB:?} "${TRAVIS_BUILD_DIR:?}"
script:
- ./ci script ${JOB:?} "${TRAVIS_BUILD_DIR:?}"
matrix:
allow_failures:
fast_finish: true
env:
- JOB=minloadtest
- JOB=dialyzer
- JOB=xref
cache:
directories:
- .plt
| dist: trusty
- sudo: false
+ sudo: required
language: erlang
otp_release:
- 19.3.6.1
- addons:
- apt:
- packages:
- - r-base
before_install:
+ - sudo apt-get -qq update
+ - sudo apt-get install -y r-cran-plyr r-cran-ggplot2
- ./ci before_install ${JOB:?} "${TRAVIS_BUILD_DIR:?}"
install:
- ./ci install ${JOB:?} "${TRAVIS_BUILD_DIR:?}"
before_script:
- ./ci before_script ${JOB:?} "${TRAVIS_BUILD_DIR:?}"
script:
- ./ci script ${JOB:?} "${TRAVIS_BUILD_DIR:?}"
matrix:
allow_failures:
fast_finish: true
env:
- JOB=minloadtest
- JOB=dialyzer
- JOB=xref
cache:
directories:
- .plt | 8 | 0.296296 | 3 | 5 |
0771dc40d784c9b3af23f6b2449c9d7f1c06775f | spec/default_config.yml | spec/default_config.yml | maximum_unused_login_ticket_lifetime: 300
authenticator:
class: Cassy::Authenticators::Test
password: spec_password
#custom_views: /path/to/custom/views
default_locale: en
#db_log:
# file: casserver_spec_db.log
enable_single_sign_out: true
#maximum_unused_login_ticket_lifetime: 300
#maximum_unused_service_ticket_lifetime: 300
#maximum_session_lifetime: 172800
#downcase_username: true
| maximum_unused_login_ticket_lifetime: 300
maximum_unused_service_ticket_lifetime: 300
authenticator:
class: Cassy::Authenticators::Test
password: spec_password
#custom_views: /path/to/custom/views
default_locale: en
#db_log:
# file: casserver_spec_db.log
enable_single_sign_out: true
#maximum_unused_login_ticket_lifetime: 300
#maximum_unused_service_ticket_lifetime: 300
#maximum_session_lifetime: 172800
#downcase_username: true
| Add maximum_unused_service_ticket_lifetime to default config | Add maximum_unused_service_ticket_lifetime to default config
| YAML | mit | OneHQ/cassy,nbudin/cassy,nbudin/cassy,reinteractive/cassy,reinteractive/cassy,nbudin/cassy,OneHQ/cassy,OneHQ/cassy,backupify/cassy,backupify/cassy,backupify/cassy | yaml | ## Code Before:
maximum_unused_login_ticket_lifetime: 300
authenticator:
class: Cassy::Authenticators::Test
password: spec_password
#custom_views: /path/to/custom/views
default_locale: en
#db_log:
# file: casserver_spec_db.log
enable_single_sign_out: true
#maximum_unused_login_ticket_lifetime: 300
#maximum_unused_service_ticket_lifetime: 300
#maximum_session_lifetime: 172800
#downcase_username: true
## Instruction:
Add maximum_unused_service_ticket_lifetime to default config
## Code After:
maximum_unused_login_ticket_lifetime: 300
maximum_unused_service_ticket_lifetime: 300
authenticator:
class: Cassy::Authenticators::Test
password: spec_password
#custom_views: /path/to/custom/views
default_locale: en
#db_log:
# file: casserver_spec_db.log
enable_single_sign_out: true
#maximum_unused_login_ticket_lifetime: 300
#maximum_unused_service_ticket_lifetime: 300
#maximum_session_lifetime: 172800
#downcase_username: true
| maximum_unused_login_ticket_lifetime: 300
+ maximum_unused_service_ticket_lifetime: 300
authenticator:
class: Cassy::Authenticators::Test
password: spec_password
#custom_views: /path/to/custom/views
default_locale: en
#db_log:
# file: casserver_spec_db.log
enable_single_sign_out: true
#maximum_unused_login_ticket_lifetime: 300
#maximum_unused_service_ticket_lifetime: 300
#maximum_session_lifetime: 172800
#downcase_username: true | 1 | 0.047619 | 1 | 0 |
eb2adee3c086bc7f1f5b3a5517dda26fd74a86bc | Casks/xscope.rb | Casks/xscope.rb | cask :v1 => 'xscope' do
version '4.1.4'
sha256 '6617f7ec94e22d3e1e137bf6386188c827575e7778f8d1d69dce8817efa752f2'
url "https://iconfactory.com/assets/software/xscope/xScope-#{version}.zip"
appcast 'https://iconfactory.com/appcasts/xScope/appcast.xml',
:sha256 => '29eaf2c30992f4c72e4a98d96ce912a1cfde16d751a6b68044ef3c116672d8eb'
name 'xScope'
homepage 'http://iconfactory.com/software/xscope'
license :commercial
app 'xScope.app'
end
| cask :v1 => 'xscope' do
version '4.1.4'
sha256 'd9f1eab342b885277344829eb7b10fa636b76991a9367fb2319f32a48a386ea2'
url "https://iconfactory.com/assets/software/xscope/xScope-#{version}.zip"
appcast 'https://iconfactory.com/appcasts/xScope/appcast.xml',
:sha256 => '29eaf2c30992f4c72e4a98d96ce912a1cfde16d751a6b68044ef3c116672d8eb'
name 'xScope'
homepage 'http://iconfactory.com/software/xscope'
license :commercial
app 'xScope.app'
end
| Update SHA256 for xScope 4.1.4 | Update SHA256 for xScope 4.1.4
| Ruby | bsd-2-clause | mattrobenolt/homebrew-cask,andrewdisley/homebrew-cask,vitorgalvao/homebrew-cask,leipert/homebrew-cask,ptb/homebrew-cask,FinalDes/homebrew-cask,mindriot101/homebrew-cask,MichaelPei/homebrew-cask,uetchy/homebrew-cask,thii/homebrew-cask,kpearson/homebrew-cask,kesara/homebrew-cask,andyli/homebrew-cask,diogodamiani/homebrew-cask,kkdd/homebrew-cask,miccal/homebrew-cask,imgarylai/homebrew-cask,malob/homebrew-cask,jasmas/homebrew-cask,mishari/homebrew-cask,napaxton/homebrew-cask,nathancahill/homebrew-cask,crzrcn/homebrew-cask,aguynamedryan/homebrew-cask,nathanielvarona/homebrew-cask,renard/homebrew-cask,xight/homebrew-cask,larseggert/homebrew-cask,mahori/homebrew-cask,brianshumate/homebrew-cask,danielbayley/homebrew-cask,nathanielvarona/homebrew-cask,scottsuch/homebrew-cask,ebraminio/homebrew-cask,arronmabrey/homebrew-cask,scottsuch/homebrew-cask,retbrown/homebrew-cask,farmerchris/homebrew-cask,blogabe/homebrew-cask,caskroom/homebrew-cask,corbt/homebrew-cask,inz/homebrew-cask,Labutin/homebrew-cask,lucasmezencio/homebrew-cask,johnjelinek/homebrew-cask,wastrachan/homebrew-cask,nshemonsky/homebrew-cask,athrunsun/homebrew-cask,gibsjose/homebrew-cask,asbachb/homebrew-cask,yutarody/homebrew-cask,schneidmaster/homebrew-cask,dcondrey/homebrew-cask,RJHsiao/homebrew-cask,williamboman/homebrew-cask,tarwich/homebrew-cask,claui/homebrew-cask,wickedsp1d3r/homebrew-cask,seanorama/homebrew-cask,ahundt/homebrew-cask,tsparber/homebrew-cask,miku/homebrew-cask,franklouwers/homebrew-cask,jppelteret/homebrew-cask,mlocher/homebrew-cask,n0ts/homebrew-cask,mathbunnyru/homebrew-cask,xakraz/homebrew-cask,JacopKane/homebrew-cask,stephenwade/homebrew-cask,fharbe/homebrew-cask,bdhess/homebrew-cask,lukeadams/homebrew-cask,MircoT/homebrew-cask,leipert/homebrew-cask,stevehedrick/homebrew-cask,JosephViolago/homebrew-cask,devmynd/homebrew-cask,singingwolfboy/homebrew-cask,jbeagley52/homebrew-cask,tedbundyjr/homebrew-cask,xtian/homebrew-cask,dwkns/homebrew-cask,Ephemera/homebrew-cask,j
ayshao/homebrew-cask,linc01n/homebrew-cask,ksylvan/homebrew-cask,yumitsu/homebrew-cask,Amorymeltzer/homebrew-cask,ianyh/homebrew-cask,mgryszko/homebrew-cask,afh/homebrew-cask,psibre/homebrew-cask,inz/homebrew-cask,nathansgreen/homebrew-cask,mgryszko/homebrew-cask,mindriot101/homebrew-cask,imgarylai/homebrew-cask,toonetown/homebrew-cask,artdevjs/homebrew-cask,crzrcn/homebrew-cask,malford/homebrew-cask,perfide/homebrew-cask,tan9/homebrew-cask,otaran/homebrew-cask,santoshsahoo/homebrew-cask,claui/homebrew-cask,lvicentesanchez/homebrew-cask,malford/homebrew-cask,chrisfinazzo/homebrew-cask,Ketouem/homebrew-cask,robertgzr/homebrew-cask,miguelfrde/homebrew-cask,shoichiaizawa/homebrew-cask,kassi/homebrew-cask,mhubig/homebrew-cask,corbt/homebrew-cask,theoriginalgri/homebrew-cask,shonjir/homebrew-cask,patresi/homebrew-cask,puffdad/homebrew-cask,squid314/homebrew-cask,haha1903/homebrew-cask,lvicentesanchez/homebrew-cask,nrlquaker/homebrew-cask,JacopKane/homebrew-cask,elnappo/homebrew-cask,AnastasiaSulyagina/homebrew-cask,scribblemaniac/homebrew-cask,dictcp/homebrew-cask,tarwich/homebrew-cask,hanxue/caskroom,paour/homebrew-cask,alexg0/homebrew-cask,JosephViolago/homebrew-cask,sjackman/homebrew-cask,michelegera/homebrew-cask,kongslund/homebrew-cask,nightscape/homebrew-cask,JacopKane/homebrew-cask,My2ndAngelic/homebrew-cask,zerrot/homebrew-cask,mrmachine/homebrew-cask,antogg/homebrew-cask,vin047/homebrew-cask,cobyism/homebrew-cask,a1russell/homebrew-cask,nightscape/homebrew-cask,janlugt/homebrew-cask,deiga/homebrew-cask,0xadada/homebrew-cask,syscrusher/homebrew-cask,jasmas/homebrew-cask,Ibuprofen/homebrew-cask,jeroenj/homebrew-cask,ptb/homebrew-cask,samshadwell/homebrew-cask,dustinblackman/homebrew-cask,janlugt/homebrew-cask,caskroom/homebrew-cask,jeroenj/homebrew-cask,jalaziz/homebrew-cask,SentinelWarren/homebrew-cask,samnung/homebrew-cask,skatsuta/homebrew-cask,greg5green/homebrew-cask,chuanxd/homebrew-cask,mathbunnyru/homebrew-cask,bcomnes/homebrew-cask,josa42/homebrew-cask,lu
maxis/homebrew-cask,moimikey/homebrew-cask,CameronGarrett/homebrew-cask,mjgardner/homebrew-cask,johndbritton/homebrew-cask,chrisfinazzo/homebrew-cask,mazehall/homebrew-cask,bric3/homebrew-cask,yuhki50/homebrew-cask,jedahan/homebrew-cask,gabrielizaias/homebrew-cask,deanmorin/homebrew-cask,andrewdisley/homebrew-cask,Keloran/homebrew-cask,yurikoles/homebrew-cask,morganestes/homebrew-cask,jeanregisser/homebrew-cask,timsutton/homebrew-cask,nathancahill/homebrew-cask,chadcatlett/caskroom-homebrew-cask,flaviocamilo/homebrew-cask,ksato9700/homebrew-cask,tjnycum/homebrew-cask,cliffcotino/homebrew-cask,nshemonsky/homebrew-cask,xakraz/homebrew-cask,neverfox/homebrew-cask,stigkj/homebrew-caskroom-cask,cobyism/homebrew-cask,gerrypower/homebrew-cask,ldong/homebrew-cask,klane/homebrew-cask,FranklinChen/homebrew-cask,victorpopkov/homebrew-cask,MerelyAPseudonym/homebrew-cask,Gasol/homebrew-cask,sjackman/homebrew-cask,Bombenleger/homebrew-cask,moimikey/homebrew-cask,exherb/homebrew-cask,pablote/homebrew-cask,ywfwj2008/homebrew-cask,optikfluffel/homebrew-cask,13k/homebrew-cask,yutarody/homebrew-cask,joschi/homebrew-cask,psibre/homebrew-cask,tedski/homebrew-cask,miku/homebrew-cask,sosedoff/homebrew-cask,koenrh/homebrew-cask,My2ndAngelic/homebrew-cask,nathansgreen/homebrew-cask,hristozov/homebrew-cask,kronicd/homebrew-cask,tjnycum/homebrew-cask,jacobbednarz/homebrew-cask,reelsense/homebrew-cask,jgarber623/homebrew-cask,rajiv/homebrew-cask,devmynd/homebrew-cask,lukasbestle/homebrew-cask,Keloran/homebrew-cask,cfillion/homebrew-cask,andrewdisley/homebrew-cask,pacav69/homebrew-cask,nathanielvarona/homebrew-cask,kTitan/homebrew-cask,riyad/homebrew-cask,Gasol/homebrew-cask,kamilboratynski/homebrew-cask,goxberry/homebrew-cask,xcezx/homebrew-cask,bric3/homebrew-cask,markhuber/homebrew-cask,ebraminio/homebrew-cask,gmkey/homebrew-cask,mazehall/homebrew-cask,elyscape/homebrew-cask,buo/homebrew-cask,ddm/homebrew-cask,jawshooah/homebrew-cask,slack4u/homebrew-cask,reelsense/homebrew-cask,fkrone/homeb
rew-cask,xight/homebrew-cask,MerelyAPseudonym/homebrew-cask,Ephemera/homebrew-cask,Ngrd/homebrew-cask,chadcatlett/caskroom-homebrew-cask,jgarber623/homebrew-cask,greg5green/homebrew-cask,diogodamiani/homebrew-cask,shoichiaizawa/homebrew-cask,retbrown/homebrew-cask,imgarylai/homebrew-cask,coeligena/homebrew-customized,alexg0/homebrew-cask,deiga/homebrew-cask,gilesdring/homebrew-cask,wKovacs64/homebrew-cask,MoOx/homebrew-cask,opsdev-ws/homebrew-cask,ddm/homebrew-cask,ftiff/homebrew-cask,SentinelWarren/homebrew-cask,cliffcotino/homebrew-cask,shoichiaizawa/homebrew-cask,fanquake/homebrew-cask,gurghet/homebrew-cask,Dremora/homebrew-cask,amatos/homebrew-cask,alebcay/homebrew-cask,mahori/homebrew-cask,vigosan/homebrew-cask,optikfluffel/homebrew-cask,Ngrd/homebrew-cask,jonathanwiesel/homebrew-cask,lumaxis/homebrew-cask,FredLackeyOfficial/homebrew-cask,tyage/homebrew-cask,victorpopkov/homebrew-cask,gibsjose/homebrew-cask,cedwardsmedia/homebrew-cask,miccal/homebrew-cask,forevergenin/homebrew-cask,lukeadams/homebrew-cask,hanxue/caskroom,johndbritton/homebrew-cask,alebcay/homebrew-cask,yurikoles/homebrew-cask,m3nu/homebrew-cask,fkrone/homebrew-cask,seanzxx/homebrew-cask,sgnh/homebrew-cask,schneidmaster/homebrew-cask,alebcay/homebrew-cask,kongslund/homebrew-cask,RJHsiao/homebrew-cask,ftiff/homebrew-cask,paour/homebrew-cask,deiga/homebrew-cask,hellosky806/homebrew-cask,tangestani/homebrew-cask,wickles/homebrew-cask,colindean/homebrew-cask,axodys/homebrew-cask,skatsuta/homebrew-cask,scribblemaniac/homebrew-cask,lifepillar/homebrew-cask,rickychilcott/homebrew-cask,stephenwade/homebrew-cask,retrography/homebrew-cask,helloIAmPau/homebrew-cask,KosherBacon/homebrew-cask,gurghet/homebrew-cask,arronmabrey/homebrew-cask,antogg/homebrew-cask,otaran/homebrew-cask,doits/homebrew-cask,malob/homebrew-cask,dictcp/homebrew-cask,fharbe/homebrew-cask,casidiablo/homebrew-cask,0rax/homebrew-cask,MoOx/homebrew-cask,xtian/homebrew-cask,Cottser/homebrew-cask,miguelfrde/homebrew-cask,jaredsampson/homebr
ew-cask,bcomnes/homebrew-cask,mahori/homebrew-cask,mjdescy/homebrew-cask,zmwangx/homebrew-cask,lcasey001/homebrew-cask,reitermarkus/homebrew-cask,deanmorin/homebrew-cask,mattrobenolt/homebrew-cask,alexg0/homebrew-cask,jconley/homebrew-cask,sscotth/homebrew-cask,sanyer/homebrew-cask,coeligena/homebrew-customized,kiliankoe/homebrew-cask,danielbayley/homebrew-cask,Dremora/homebrew-cask,bosr/homebrew-cask,ayohrling/homebrew-cask,mishari/homebrew-cask,0rax/homebrew-cask,joshka/homebrew-cask,markthetech/homebrew-cask,casidiablo/homebrew-cask,stonehippo/homebrew-cask,xyb/homebrew-cask,jppelteret/homebrew-cask,codeurge/homebrew-cask,dcondrey/homebrew-cask,ericbn/homebrew-cask,farmerchris/homebrew-cask,boecko/homebrew-cask,ninjahoahong/homebrew-cask,michelegera/homebrew-cask,jedahan/homebrew-cask,blainesch/homebrew-cask,markhuber/homebrew-cask,sohtsuka/homebrew-cask,claui/homebrew-cask,kiliankoe/homebrew-cask,maxnordlund/homebrew-cask,hristozov/homebrew-cask,feigaochn/homebrew-cask,tyage/homebrew-cask,y00rb/homebrew-cask,usami-k/homebrew-cask,ayohrling/homebrew-cask,nrlquaker/homebrew-cask,shorshe/homebrew-cask,sohtsuka/homebrew-cask,inta/homebrew-cask,mchlrmrz/homebrew-cask,thomanq/homebrew-cask,stigkj/homebrew-caskroom-cask,codeurge/homebrew-cask,renaudguerin/homebrew-cask,pablote/homebrew-cask,jalaziz/homebrew-cask,asins/homebrew-cask,gyndav/homebrew-cask,julionc/homebrew-cask,Ibuprofen/homebrew-cask,jacobbednarz/homebrew-cask,mwean/homebrew-cask,giannitm/homebrew-cask,jalaziz/homebrew-cask,jangalinski/homebrew-cask,flaviocamilo/homebrew-cask,MircoT/homebrew-cask,sscotth/homebrew-cask,JosephViolago/homebrew-cask,patresi/homebrew-cask,cprecioso/homebrew-cask,slack4u/homebrew-cask,troyxmccall/homebrew-cask,guerrero/homebrew-cask,tmoreira2020/homebrew,Saklad5/homebrew-cask,kingthorin/homebrew-cask,joschi/homebrew-cask,bdhess/homebrew-cask,gerrypower/homebrew-cask,santoshsahoo/homebrew-cask,wmorin/homebrew-cask,malob/homebrew-cask,lukasbestle/homebrew-cask,doits/homebrew-cask
,jiashuw/homebrew-cask,englishm/homebrew-cask,a1russell/homebrew-cask,mathbunnyru/homebrew-cask,nrlquaker/homebrew-cask,elyscape/homebrew-cask,axodys/homebrew-cask,AnastasiaSulyagina/homebrew-cask,mhubig/homebrew-cask,hyuna917/homebrew-cask,n0ts/homebrew-cask,blogabe/homebrew-cask,joshka/homebrew-cask,reitermarkus/homebrew-cask,hanxue/caskroom,amatos/homebrew-cask,samshadwell/homebrew-cask,BenjaminHCCarr/homebrew-cask,ywfwj2008/homebrew-cask,xyb/homebrew-cask,sgnh/homebrew-cask,dvdoliveira/homebrew-cask,faun/homebrew-cask,perfide/homebrew-cask,0xadada/homebrew-cask,cobyism/homebrew-cask,sanyer/homebrew-cask,Ephemera/homebrew-cask,tangestani/homebrew-cask,maxnordlund/homebrew-cask,koenrh/homebrew-cask,sebcode/homebrew-cask,Bombenleger/homebrew-cask,yumitsu/homebrew-cask,decrement/homebrew-cask,gabrielizaias/homebrew-cask,neverfox/homebrew-cask,joshka/homebrew-cask,sanchezm/homebrew-cask,diguage/homebrew-cask,joschi/homebrew-cask,lantrix/homebrew-cask,moogar0880/homebrew-cask,adrianchia/homebrew-cask,m3nu/homebrew-cask,mariusbutuc/homebrew-cask,xcezx/homebrew-cask,stevehedrick/homebrew-cask,hakamadare/homebrew-cask,opsdev-ws/homebrew-cask,vitorgalvao/homebrew-cask,13k/homebrew-cask,antogg/homebrew-cask,winkelsdorf/homebrew-cask,toonetown/homebrew-cask,asins/homebrew-cask,samdoran/homebrew-cask,mikem/homebrew-cask,aguynamedryan/homebrew-cask,jgarber623/homebrew-cask,jconley/homebrew-cask,mjgardner/homebrew-cask,rajiv/homebrew-cask,cedwardsmedia/homebrew-cask,Fedalto/homebrew-cask,daften/homebrew-cask,johnjelinek/homebrew-cask,chrisfinazzo/homebrew-cask,brianshumate/homebrew-cask,mauricerkelly/homebrew-cask,lantrix/homebrew-cask,kronicd/homebrew-cask,xyb/homebrew-cask,gilesdring/homebrew-cask,JikkuJose/homebrew-cask,mikem/homebrew-cask,hovancik/homebrew-cask,goxberry/homebrew-cask,yuhki50/homebrew-cask,yurikoles/homebrew-cask,dustinblackman/homebrew-cask,sosedoff/homebrew-cask,jayshao/homebrew-cask,tsparber/homebrew-cask,BenjaminHCCarr/homebrew-cask,linc01n/homebrew-cas
k,cblecker/homebrew-cask,troyxmccall/homebrew-cask,tmoreira2020/homebrew,jeroenseegers/homebrew-cask,n8henrie/homebrew-cask,dwihn0r/homebrew-cask,scribblemaniac/homebrew-cask,dvdoliveira/homebrew-cask,mchlrmrz/homebrew-cask,uetchy/homebrew-cask,scottsuch/homebrew-cask,jpmat296/homebrew-cask,gmkey/homebrew-cask,tan9/homebrew-cask,JikkuJose/homebrew-cask,mrmachine/homebrew-cask,jellyfishcoder/homebrew-cask,giannitm/homebrew-cask,exherb/homebrew-cask,helloIAmPau/homebrew-cask,adrianchia/homebrew-cask,julionc/homebrew-cask,tjt263/homebrew-cask,syscrusher/homebrew-cask,muan/homebrew-cask,lucasmezencio/homebrew-cask,riyad/homebrew-cask,napaxton/homebrew-cask,timsutton/homebrew-cask,cprecioso/homebrew-cask,julionc/homebrew-cask,josa42/homebrew-cask,tedbundyjr/homebrew-cask,mingzhi22/homebrew-cask,Fedalto/homebrew-cask,moogar0880/homebrew-cask,mattrobenolt/homebrew-cask,anbotero/homebrew-cask,feigaochn/homebrew-cask,mauricerkelly/homebrew-cask,samdoran/homebrew-cask,wastrachan/homebrew-cask,thehunmonkgroup/homebrew-cask,vin047/homebrew-cask,mlocher/homebrew-cask,yutarody/homebrew-cask,muan/homebrew-cask,wickedsp1d3r/homebrew-cask,robbiethegeek/homebrew-cask,n8henrie/homebrew-cask,jiashuw/homebrew-cask,wickles/homebrew-cask,FinalDes/homebrew-cask,kpearson/homebrew-cask,stephenwade/homebrew-cask,paour/homebrew-cask,faun/homebrew-cask,boecko/homebrew-cask,kingthorin/homebrew-cask,jawshooah/homebrew-cask,forevergenin/homebrew-cask,haha1903/homebrew-cask,phpwutz/homebrew-cask,tolbkni/homebrew-cask,kkdd/homebrew-cask,renard/homebrew-cask,hellosky806/homebrew-cask,uetchy/homebrew-cask,tedski/homebrew-cask,usami-k/homebrew-cask,guerrero/homebrew-cask,winkelsdorf/homebrew-cask,albertico/homebrew-cask,6uclz1/homebrew-cask,xight/homebrew-cask,mariusbutuc/homebrew-cask,jpmat296/homebrew-cask,singingwolfboy/homebrew-cask,jhowtan/homebrew-cask,CameronGarrett/homebrew-cask,KosherBacon/homebrew-cask,wKovacs64/homebrew-cask,sebcode/homebrew-cask,ldong/homebrew-cask,y00rb/homebrew-cask,retro
graphy/homebrew-cask,esebastian/homebrew-cask,anbotero/homebrew-cask,esebastian/homebrew-cask,artdevjs/homebrew-cask,morganestes/homebrew-cask,jonathanwiesel/homebrew-cask,williamboman/homebrew-cask,mingzhi22/homebrew-cask,cblecker/homebrew-cask,franklouwers/homebrew-cask,danielbayley/homebrew-cask,rickychilcott/homebrew-cask,kamilboratynski/homebrew-cask,jaredsampson/homebrew-cask,zerrot/homebrew-cask,Saklad5/homebrew-cask,athrunsun/homebrew-cask,sscotth/homebrew-cask,esebastian/homebrew-cask,seanorama/homebrew-cask,winkelsdorf/homebrew-cask,lifepillar/homebrew-cask,stonehippo/homebrew-cask,rogeriopradoj/homebrew-cask,larseggert/homebrew-cask,wmorin/homebrew-cask,zmwangx/homebrew-cask,albertico/homebrew-cask,tolbkni/homebrew-cask,gyndav/homebrew-cask,ericbn/homebrew-cask,shonjir/homebrew-cask,timsutton/homebrew-cask,dwihn0r/homebrew-cask,tjt263/homebrew-cask,howie/homebrew-cask,josa42/homebrew-cask,andyli/homebrew-cask,bosr/homebrew-cask,decrement/homebrew-cask,stonehippo/homebrew-cask,ianyh/homebrew-cask,vigosan/homebrew-cask,Cottser/homebrew-cask,FredLackeyOfficial/homebrew-cask,squid314/homebrew-cask,robertgzr/homebrew-cask,inta/homebrew-cask,reitermarkus/homebrew-cask,fanquake/homebrew-cask,kassi/homebrew-cask,thii/homebrew-cask,theoriginalgri/homebrew-cask,ksylvan/homebrew-cask,rajiv/homebrew-cask,colindean/homebrew-cask,m3nu/homebrew-cask,MichaelPei/homebrew-cask,klane/homebrew-cask,okket/homebrew-cask,elnappo/homebrew-cask,moimikey/homebrew-cask,mwean/homebrew-cask,sanchezm/homebrew-cask,ninjahoahong/homebrew-cask,mjgardner/homebrew-cask,kTitan/homebrew-cask,buo/homebrew-cask,mjdescy/homebrew-cask,mchlrmrz/homebrew-cask,hyuna917/homebrew-cask,rogeriopradoj/homebrew-cask,pkq/homebrew-cask,daften/homebrew-cask,asbachb/homebrew-cask,ericbn/homebrew-cask,markthetech/homebrew-cask,Ketouem/homebrew-cask,kteru/homebrew-cask,singingwolfboy/homebrew-cask,jeanregisser/homebrew-cask,miccal/homebrew-cask,FranklinChen/homebrew-cask,cfillion/homebrew-cask,jhowtan/homebrew
-cask,okket/homebrew-cask,chuanxd/homebrew-cask,blogabe/homebrew-cask,dwkns/homebrew-cask,Amorymeltzer/homebrew-cask,bric3/homebrew-cask,englishm/homebrew-cask,kesara/homebrew-cask,shorshe/homebrew-cask,renaudguerin/homebrew-cask,pkq/homebrew-cask,robbiethegeek/homebrew-cask,lcasey001/homebrew-cask,jbeagley52/homebrew-cask,Labutin/homebrew-cask,jangalinski/homebrew-cask,afh/homebrew-cask,jmeridth/homebrew-cask,hakamadare/homebrew-cask,tjnycum/homebrew-cask,Amorymeltzer/homebrew-cask,6uclz1/homebrew-cask,ahundt/homebrew-cask,dictcp/homebrew-cask,kteru/homebrew-cask,sanyer/homebrew-cask,seanzxx/homebrew-cask,thomanq/homebrew-cask,blainesch/homebrew-cask,coeligena/homebrew-customized,jeroenseegers/homebrew-cask,neverfox/homebrew-cask,puffdad/homebrew-cask,diguage/homebrew-cask,thehunmonkgroup/homebrew-cask,pkq/homebrew-cask,pacav69/homebrew-cask,wmorin/homebrew-cask,tangestani/homebrew-cask,kesara/homebrew-cask,colindunn/homebrew-cask,optikfluffel/homebrew-cask,jellyfishcoder/homebrew-cask,kingthorin/homebrew-cask,onlynone/homebrew-cask,howie/homebrew-cask,onlynone/homebrew-cask,gyndav/homebrew-cask,ksato9700/homebrew-cask,colindunn/homebrew-cask,samnung/homebrew-cask,phpwutz/homebrew-cask,adrianchia/homebrew-cask,hovancik/homebrew-cask,shonjir/homebrew-cask,cblecker/homebrew-cask,a1russell/homebrew-cask,rogeriopradoj/homebrew-cask,BenjaminHCCarr/homebrew-cask,jmeridth/homebrew-cask | ruby | ## Code Before:
cask :v1 => 'xscope' do
version '4.1.4'
sha256 '6617f7ec94e22d3e1e137bf6386188c827575e7778f8d1d69dce8817efa752f2'
url "https://iconfactory.com/assets/software/xscope/xScope-#{version}.zip"
appcast 'https://iconfactory.com/appcasts/xScope/appcast.xml',
:sha256 => '29eaf2c30992f4c72e4a98d96ce912a1cfde16d751a6b68044ef3c116672d8eb'
name 'xScope'
homepage 'http://iconfactory.com/software/xscope'
license :commercial
app 'xScope.app'
end
## Instruction:
Update SHA256 for xScope 4.1.4
## Code After:
cask :v1 => 'xscope' do
version '4.1.4'
sha256 'd9f1eab342b885277344829eb7b10fa636b76991a9367fb2319f32a48a386ea2'
url "https://iconfactory.com/assets/software/xscope/xScope-#{version}.zip"
appcast 'https://iconfactory.com/appcasts/xScope/appcast.xml',
:sha256 => '29eaf2c30992f4c72e4a98d96ce912a1cfde16d751a6b68044ef3c116672d8eb'
name 'xScope'
homepage 'http://iconfactory.com/software/xscope'
license :commercial
app 'xScope.app'
end
| cask :v1 => 'xscope' do
version '4.1.4'
- sha256 '6617f7ec94e22d3e1e137bf6386188c827575e7778f8d1d69dce8817efa752f2'
+ sha256 'd9f1eab342b885277344829eb7b10fa636b76991a9367fb2319f32a48a386ea2'
url "https://iconfactory.com/assets/software/xscope/xScope-#{version}.zip"
appcast 'https://iconfactory.com/appcasts/xScope/appcast.xml',
:sha256 => '29eaf2c30992f4c72e4a98d96ce912a1cfde16d751a6b68044ef3c116672d8eb'
name 'xScope'
homepage 'http://iconfactory.com/software/xscope'
license :commercial
app 'xScope.app'
end | 2 | 0.153846 | 1 | 1 |
d3ecf850a4502e0f51ffad56bb1a65a353ea1f98 | doc/source/code_ref/image.rst | doc/source/code_ref/image.rst | SunPy image
===========
.. automodapi:: sunpy.image
.. automodapi:: sunpy.image.rescale
:headings: ".#"
| SunPy image
===========
.. automodapi:: sunpy.image
.. automodapi:: sunpy.image.rescale
:headings: ".#"
.. automodapi:: sunpy.image.coalignment
:headings: ".#"
| Add the coalignment documentation to the API docs | Add the coalignment documentation to the API docs
| reStructuredText | bsd-2-clause | Alex-Ian-Hamilton/sunpy,Alex-Ian-Hamilton/sunpy,dpshelio/sunpy,Alex-Ian-Hamilton/sunpy,dpshelio/sunpy,dpshelio/sunpy | restructuredtext | ## Code Before:
SunPy image
===========
.. automodapi:: sunpy.image
.. automodapi:: sunpy.image.rescale
:headings: ".#"
## Instruction:
Add the coalignment documentation to the API docs
## Code After:
SunPy image
===========
.. automodapi:: sunpy.image
.. automodapi:: sunpy.image.rescale
:headings: ".#"
.. automodapi:: sunpy.image.coalignment
:headings: ".#"
| SunPy image
===========
.. automodapi:: sunpy.image
.. automodapi:: sunpy.image.rescale
:headings: ".#"
+
+ .. automodapi:: sunpy.image.coalignment
+ :headings: ".#" | 3 | 0.428571 | 3 | 0 |
3cc4a175e58c9dcd1d36a4cef6f7954bdc035c25 | src/edu/yalestc/yalepublic/MapView.java | src/edu/yalestc/yalepublic/MapView.java | package edu.yalestc.yalepublic;
import android.app.Activity;
import android.os.Bundle;
/**
* Created by Jason Liu on 10/18/14.
*/
public class MapView extends Activity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.map_simple);
}
}
| package edu.yalestc.yalepublic;
import android.app.Activity;
import android.os.Bundle;
import com.google.android.gms.maps.CameraUpdateFactory;
import com.google.android.gms.maps.GoogleMap;
import com.google.android.gms.maps.MapFragment;
import com.google.android.gms.maps.model.LatLng;
import com.google.android.gms.maps.model.MarkerOptions;
/**
* Created by Jason Liu on 10/18/14.
*/
public class MapView extends Activity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.map_simple);
// Get a handle to the Map Fragment
GoogleMap map = ((MapFragment) getFragmentManager()
.findFragmentById(R.id.map)).getMap();
LatLng yale = new LatLng(41.3111, -72.9267); // The - signifies the Western Hemisphere
map.setMyLocationEnabled(true);
map.moveCamera(CameraUpdateFactory.newLatLngZoom(yale, 13));
map.addMarker(new MarkerOptions()
.title("Yale")
.snippet("The best university ever.")
.position(yale));
}
}
| Add centered map on our location | Add centered map on our location
| Java | mit | YaleSTC/YalePublic-android,YaleSTC/YalePublic-android | java | ## Code Before:
package edu.yalestc.yalepublic;
import android.app.Activity;
import android.os.Bundle;
/**
* Created by Jason Liu on 10/18/14.
*/
public class MapView extends Activity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.map_simple);
}
}
## Instruction:
Add centered map on our location
## Code After:
package edu.yalestc.yalepublic;
import android.app.Activity;
import android.os.Bundle;
import com.google.android.gms.maps.CameraUpdateFactory;
import com.google.android.gms.maps.GoogleMap;
import com.google.android.gms.maps.MapFragment;
import com.google.android.gms.maps.model.LatLng;
import com.google.android.gms.maps.model.MarkerOptions;
/**
* Created by Jason Liu on 10/18/14.
*/
public class MapView extends Activity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.map_simple);
// Get a handle to the Map Fragment
GoogleMap map = ((MapFragment) getFragmentManager()
.findFragmentById(R.id.map)).getMap();
LatLng yale = new LatLng(41.3111, -72.9267); // The - signifies the Western Hemisphere
map.setMyLocationEnabled(true);
map.moveCamera(CameraUpdateFactory.newLatLngZoom(yale, 13));
map.addMarker(new MarkerOptions()
.title("Yale")
.snippet("The best university ever.")
.position(yale));
}
}
| package edu.yalestc.yalepublic;
import android.app.Activity;
import android.os.Bundle;
+
+ import com.google.android.gms.maps.CameraUpdateFactory;
+ import com.google.android.gms.maps.GoogleMap;
+ import com.google.android.gms.maps.MapFragment;
+ import com.google.android.gms.maps.model.LatLng;
+ import com.google.android.gms.maps.model.MarkerOptions;
/**
* Created by Jason Liu on 10/18/14.
*/
public class MapView extends Activity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.map_simple);
+
+ // Get a handle to the Map Fragment
+ GoogleMap map = ((MapFragment) getFragmentManager()
+ .findFragmentById(R.id.map)).getMap();
+
+ LatLng yale = new LatLng(41.3111, -72.9267); // The - signifies the Western Hemisphere
+
+ map.setMyLocationEnabled(true);
+ map.moveCamera(CameraUpdateFactory.newLatLngZoom(yale, 13));
+
+ map.addMarker(new MarkerOptions()
+ .title("Yale")
+ .snippet("The best university ever.")
+ .position(yale));
}
} | 20 | 1.176471 | 20 | 0 |
75b5fdd2b6627081bd1eb095169ed5f8ac6f3090 | about_me/index.html | about_me/index.html | <!doctype html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>CS290: Homework</title>
</head>
<body>
<main>
<img alt="Andy Chase" src="portrait.jpeg" /><br />
<a href="mailto:chasean@oregonstate.edu">chasean@oregonstate.edu</a>
</main>
</body>
</html> | <!doctype html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>CS290: Homework</title>
</head>
<body>
<main>
<img alt="Andy Chase" src="portrait.jpeg"/><br/>
<a href="mailto:chasean@oregonstate.edu">chasean@oregonstate.edu</a>
<div class="schedule">
<h2>My schedule</h2>
<table border="1">
<thead>
<tr>
<th></th>
<th>Mon</th>
<th>Tue</th>
<th>Wed</th>
<th>Thurs</th>
<th>Fri</th>
</tr>
</thead>
<tbody>
<tr>
<th scope="row">10a</th>
<td></td>
<td>CS 419</td>
<td></td>
<td>CS 419
</td>
<td></td>
</tr>
<tr>
<th>11a</th>
<td></td>
<td></td>
<td></td>
<td></td>
<td></td>
</tr>
<tr>
<th>12p</th>
<td></td>
<td>CS 461</td>
<td></td>
<td>CS 461</td>
<td></td>
</tr>
<tr>
<th>1p</th>
<td></td>
<td></td>
<td></td>
<td></td>
<td></td>
</tr>
<tr>
<th>2p</th>
<td>CS 290</td>
<td></td>
<td>CS 290</td>
<td></td>
<td>CS 290</td>
</tr>
</tbody>
</table>
</div>
</main>
</body>
</html> | Add schedule to "about me" | Add schedule to "about me"
| HTML | mit | andychase/classwork,andychase/classwork,andychase/classwork,andychase/classwork,andychase/classwork,andychase/classwork,andychase/classwork,andychase/classwork | html | ## Code Before:
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>CS290: Homework</title>
</head>
<body>
<main>
<img alt="Andy Chase" src="portrait.jpeg" /><br />
<a href="mailto:chasean@oregonstate.edu">chasean@oregonstate.edu</a>
</main>
</body>
</html>
## Instruction:
Add schedule to "about me"
## Code After:
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>CS290: Homework</title>
</head>
<body>
<main>
<img alt="Andy Chase" src="portrait.jpeg"/><br/>
<a href="mailto:chasean@oregonstate.edu">chasean@oregonstate.edu</a>
<div class="schedule">
<h2>My schedule</h2>
<table border="1">
<thead>
<tr>
<th></th>
<th>Mon</th>
<th>Tue</th>
<th>Wed</th>
<th>Thurs</th>
<th>Fri</th>
</tr>
</thead>
<tbody>
<tr>
<th scope="row">10a</th>
<td></td>
<td>CS 419</td>
<td></td>
<td>CS 419
</td>
<td></td>
</tr>
<tr>
<th>11a</th>
<td></td>
<td></td>
<td></td>
<td></td>
<td></td>
</tr>
<tr>
<th>12p</th>
<td></td>
<td>CS 461</td>
<td></td>
<td>CS 461</td>
<td></td>
</tr>
<tr>
<th>1p</th>
<td></td>
<td></td>
<td></td>
<td></td>
<td></td>
</tr>
<tr>
<th>2p</th>
<td>CS 290</td>
<td></td>
<td>CS 290</td>
<td></td>
<td>CS 290</td>
</tr>
</tbody>
</table>
</div>
</main>
</body>
</html> | <!doctype html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>CS290: Homework</title>
</head>
<body>
<main>
- <img alt="Andy Chase" src="portrait.jpeg" /><br />
? - -
+ <img alt="Andy Chase" src="portrait.jpeg"/><br/>
<a href="mailto:chasean@oregonstate.edu">chasean@oregonstate.edu</a>
+
+ <div class="schedule">
+ <h2>My schedule</h2>
+ <table border="1">
+ <thead>
+ <tr>
+ <th></th>
+ <th>Mon</th>
+ <th>Tue</th>
+ <th>Wed</th>
+ <th>Thurs</th>
+ <th>Fri</th>
+ </tr>
+ </thead>
+ <tbody>
+ <tr>
+ <th scope="row">10a</th>
+ <td></td>
+ <td>CS 419</td>
+ <td></td>
+ <td>CS 419
+ </td>
+ <td></td>
+ </tr>
+ <tr>
+ <th>11a</th>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ </tr>
+ <tr>
+ <th>12p</th>
+ <td></td>
+ <td>CS 461</td>
+ <td></td>
+ <td>CS 461</td>
+ <td></td>
+ </tr>
+ <tr>
+ <th>1p</th>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ </tr>
+ <tr>
+ <th>2p</th>
+ <td>CS 290</td>
+ <td></td>
+ <td>CS 290</td>
+ <td></td>
+ <td>CS 290</td>
+
+
+ </tr>
+ </tbody>
+ </table>
+ </div>
</main>
+
</body>
</html> | 64 | 4.266667 | 63 | 1 |
3f37fa7bb791b67e9f7d3ad82c5e3a951c985ed1 | app/views/scrapers/_data.html.haml | app/views/scrapers/_data.html.haml | - if @rows && !@rows.empty?
%h3 Data
%p
- unless signed_in?
To download data
= link_to "sign in with GitHub", user_omniauth_authorize_path(:github), class: "btn btn-default"
.btn-group
= link_to "Download table (as CSV)", scraper_data_path(@scraper, :format => :csv), class: "btn btn-default", disabled: !signed_in?
= link_to "Download SQLite database", scraper_data_path(@scraper, :format => :sqlite), class: "btn btn-default", disabled: !signed_in?
= link_to "Use the API", api_documentation_index_path(scraper: @scraper.full_name), class: "btn btn-default"
%p rows #{[10, @scraper.database.no_rows].min} / #{@scraper.database.no_rows} (#{number_to_human_size @scraper.database.sqlite_db_size})
.table-responsive.scraper-data
%table.table.table-striped.table-bordered.table-condensed
%tr
- @rows.first.each_key do |field|
%th= field
- @rows.each do |row|
%tr
- row.each_value do |v|
%td
%div.has-popover{data: {toggle: "popover", placement: "bottom", trigger: "hover", content: v, container: "body"}}
= auto_link(v.to_s)
| - if @rows && !@rows.empty?
%h3 Data
%p
- unless signed_in?
To download data
= link_to "sign in with GitHub", user_omniauth_authorize_path(:github), class: "btn btn-default"
.btn-group
= link_to "Download table (as CSV)", scraper_data_path(@scraper, format: :csv, query: @scraper.database.select_all), class: "btn btn-default", disabled: !signed_in?
= link_to "Download SQLite database", scraper_data_path(@scraper, :format => :sqlite), class: "btn btn-default", disabled: !signed_in?
= link_to "Use the API", api_documentation_index_path(scraper: @scraper.full_name), class: "btn btn-default"
%p rows #{[10, @scraper.database.no_rows].min} / #{@scraper.database.no_rows} (#{number_to_human_size @scraper.database.sqlite_db_size})
.table-responsive.scraper-data
%table.table.table-striped.table-bordered.table-condensed
%tr
- @rows.first.each_key do |field|
%th= field
- @rows.each do |row|
%tr
- row.each_value do |v|
%td
%div.has-popover{data: {toggle: "popover", placement: "bottom", trigger: "hover", content: v, container: "body"}}
= auto_link(v.to_s)
| Include full query in csv download link | Include full query in csv download link
| Haml | agpl-3.0 | OpenAddressesUK/morph,OpenAddressesUK/morph,OpenAddressesUK/morph,otherchirps/morph,otherchirps/morph,otherchirps/morph,otherchirps/morph,OpenAddressesUK/morph,openaustralia/morph,openaustralia/morph,otherchirps/morph,openaustralia/morph,openaustralia/morph,openaustralia/morph,otherchirps/morph,otherchirps/morph,openaustralia/morph,openaustralia/morph | haml | ## Code Before:
- if @rows && !@rows.empty?
%h3 Data
%p
- unless signed_in?
To download data
= link_to "sign in with GitHub", user_omniauth_authorize_path(:github), class: "btn btn-default"
.btn-group
= link_to "Download table (as CSV)", scraper_data_path(@scraper, :format => :csv), class: "btn btn-default", disabled: !signed_in?
= link_to "Download SQLite database", scraper_data_path(@scraper, :format => :sqlite), class: "btn btn-default", disabled: !signed_in?
= link_to "Use the API", api_documentation_index_path(scraper: @scraper.full_name), class: "btn btn-default"
%p rows #{[10, @scraper.database.no_rows].min} / #{@scraper.database.no_rows} (#{number_to_human_size @scraper.database.sqlite_db_size})
.table-responsive.scraper-data
%table.table.table-striped.table-bordered.table-condensed
%tr
- @rows.first.each_key do |field|
%th= field
- @rows.each do |row|
%tr
- row.each_value do |v|
%td
%div.has-popover{data: {toggle: "popover", placement: "bottom", trigger: "hover", content: v, container: "body"}}
= auto_link(v.to_s)
## Instruction:
Include full query in csv download link
## Code After:
- if @rows && !@rows.empty?
%h3 Data
%p
- unless signed_in?
To download data
= link_to "sign in with GitHub", user_omniauth_authorize_path(:github), class: "btn btn-default"
.btn-group
= link_to "Download table (as CSV)", scraper_data_path(@scraper, format: :csv, query: @scraper.database.select_all), class: "btn btn-default", disabled: !signed_in?
= link_to "Download SQLite database", scraper_data_path(@scraper, :format => :sqlite), class: "btn btn-default", disabled: !signed_in?
= link_to "Use the API", api_documentation_index_path(scraper: @scraper.full_name), class: "btn btn-default"
%p rows #{[10, @scraper.database.no_rows].min} / #{@scraper.database.no_rows} (#{number_to_human_size @scraper.database.sqlite_db_size})
.table-responsive.scraper-data
%table.table.table-striped.table-bordered.table-condensed
%tr
- @rows.first.each_key do |field|
%th= field
- @rows.each do |row|
%tr
- row.each_value do |v|
%td
%div.has-popover{data: {toggle: "popover", placement: "bottom", trigger: "hover", content: v, container: "body"}}
= auto_link(v.to_s)
| - if @rows && !@rows.empty?
%h3 Data
%p
- unless signed_in?
To download data
= link_to "sign in with GitHub", user_omniauth_authorize_path(:github), class: "btn btn-default"
.btn-group
- = link_to "Download table (as CSV)", scraper_data_path(@scraper, :format => :csv), class: "btn btn-default", disabled: !signed_in?
? - ^^^
+ = link_to "Download table (as CSV)", scraper_data_path(@scraper, format: :csv, query: @scraper.database.select_all), class: "btn btn-default", disabled: !signed_in?
? ^ +++++++++++++++++++++++++++++++++++++
= link_to "Download SQLite database", scraper_data_path(@scraper, :format => :sqlite), class: "btn btn-default", disabled: !signed_in?
= link_to "Use the API", api_documentation_index_path(scraper: @scraper.full_name), class: "btn btn-default"
%p rows #{[10, @scraper.database.no_rows].min} / #{@scraper.database.no_rows} (#{number_to_human_size @scraper.database.sqlite_db_size})
.table-responsive.scraper-data
%table.table.table-striped.table-bordered.table-condensed
%tr
- @rows.first.each_key do |field|
%th= field
- @rows.each do |row|
%tr
- row.each_value do |v|
%td
%div.has-popover{data: {toggle: "popover", placement: "bottom", trigger: "hover", content: v, container: "body"}}
= auto_link(v.to_s) | 2 | 0.090909 | 1 | 1 |
525e580d44a4f27accba27b7650e4d78e70f302d | src/List/ListItem.js | src/List/ListItem.js | import classNames from 'classnames';
import React, {PropTypes} from 'react';
import Util from '../Util/Util';
const CSSTransitionGroup = React.addons.CSSTransitionGroup;
export default class ListItem extends React.Component {
render() {
let defaultClass = ListItem.defaultProps.className;
let classes = classNames(this.props.className, defaultClass);
let Tag = this.props.tag;
// Uses all passed properties as attributes, excluding propTypes
let attributes = Util.exclude(this.props, Object.keys(ListItem.propTypes)) || {};
if (attributes.transition) {
return (
<CSSTransitionGroup
{...attributes}
className={classes}
component={this.props.tag}>
{this.props.children}
</CSSTransitionGroup>
);
}
return (
<Tag {...attributes} className={classes}>
{this.props.children}
</Tag>
);
}
}
ListItem.defaultProps = {
className: 'list-item',
tag: 'li'
};
ListItem.propTypes = {
children: PropTypes.node,
className: PropTypes.string,
tag: PropTypes.string
};
| import React, {PropTypes} from 'react';
import Util from '../Util/Util';
const CSSTransitionGroup = React.addons.CSSTransitionGroup;
export default class ListItem extends React.Component {
render() {
let Tag = this.props.tag;
// Uses all passed properties as attributes, excluding propTypes
let attributes = Util.exclude(this.props, Object.keys(ListItem.propTypes)) || {};
if (attributes.transition) {
return (
<CSSTransitionGroup
{...attributes}
className={this.props.className}
component={this.props.tag}>
{this.props.children}
</CSSTransitionGroup>
);
}
return (
<Tag {...attributes} className={this.props.className}>
{this.props.children}
</Tag>
);
}
}
ListItem.defaultProps = {
className: 'list-item',
tag: 'li'
};
ListItem.propTypes = {
children: PropTypes.node,
className: PropTypes.string,
tag: PropTypes.string
};
| Fix duplicated classnames being applied to list items | Fix duplicated classnames being applied to list items
| JavaScript | apache-2.0 | mesosphere/reactjs-components,mesosphere/reactjs-components | javascript | ## Code Before:
import classNames from 'classnames';
import React, {PropTypes} from 'react';
import Util from '../Util/Util';
const CSSTransitionGroup = React.addons.CSSTransitionGroup;
export default class ListItem extends React.Component {
render() {
let defaultClass = ListItem.defaultProps.className;
let classes = classNames(this.props.className, defaultClass);
let Tag = this.props.tag;
// Uses all passed properties as attributes, excluding propTypes
let attributes = Util.exclude(this.props, Object.keys(ListItem.propTypes)) || {};
if (attributes.transition) {
return (
<CSSTransitionGroup
{...attributes}
className={classes}
component={this.props.tag}>
{this.props.children}
</CSSTransitionGroup>
);
}
return (
<Tag {...attributes} className={classes}>
{this.props.children}
</Tag>
);
}
}
ListItem.defaultProps = {
className: 'list-item',
tag: 'li'
};
ListItem.propTypes = {
children: PropTypes.node,
className: PropTypes.string,
tag: PropTypes.string
};
## Instruction:
Fix duplicated classnames being applied to list items
## Code After:
import React, {PropTypes} from 'react';
import Util from '../Util/Util';
const CSSTransitionGroup = React.addons.CSSTransitionGroup;
export default class ListItem extends React.Component {
render() {
let Tag = this.props.tag;
// Uses all passed properties as attributes, excluding propTypes
let attributes = Util.exclude(this.props, Object.keys(ListItem.propTypes)) || {};
if (attributes.transition) {
return (
<CSSTransitionGroup
{...attributes}
className={this.props.className}
component={this.props.tag}>
{this.props.children}
</CSSTransitionGroup>
);
}
return (
<Tag {...attributes} className={this.props.className}>
{this.props.children}
</Tag>
);
}
}
ListItem.defaultProps = {
className: 'list-item',
tag: 'li'
};
ListItem.propTypes = {
children: PropTypes.node,
className: PropTypes.string,
tag: PropTypes.string
};
| - import classNames from 'classnames';
import React, {PropTypes} from 'react';
import Util from '../Util/Util';
const CSSTransitionGroup = React.addons.CSSTransitionGroup;
export default class ListItem extends React.Component {
render() {
- let defaultClass = ListItem.defaultProps.className;
- let classes = classNames(this.props.className, defaultClass);
let Tag = this.props.tag;
// Uses all passed properties as attributes, excluding propTypes
let attributes = Util.exclude(this.props, Object.keys(ListItem.propTypes)) || {};
if (attributes.transition) {
return (
<CSSTransitionGroup
{...attributes}
- className={classes}
? -
+ className={this.props.className}
? +++++++++++ +++
component={this.props.tag}>
{this.props.children}
</CSSTransitionGroup>
);
}
return (
- <Tag {...attributes} className={classes}>
? -
+ <Tag {...attributes} className={this.props.className}>
? +++++++++++ +++
{this.props.children}
</Tag>
);
}
}
ListItem.defaultProps = {
className: 'list-item',
tag: 'li'
};
ListItem.propTypes = {
children: PropTypes.node,
className: PropTypes.string,
tag: PropTypes.string
}; | 7 | 0.155556 | 2 | 5 |
679e2c5d5ff626e1888ce847dbc4e4784747eab0 | waitless/app/views/restaurants/_result.html.erb | waitless/app/views/restaurants/_result.html.erb | <div class="result-block">
<div class="result-wrapper">
<div class="result-img"><img src="<%=result[:image]%>"></div>
<div class="result-info">
<div><label>Restaurant Name</label></div>
<div><%=result[:name]%></div>
<div><label>Category</label></div>
<div>
<%=result[:categories]%>
</div>
<div><label>Location</label></div>
<div><%=result[:location][0]%></div>
<div><%=result[:location][2]%></div>
<div>
<label>Rating</label>
<img src="<%=result[:rating]%>">
</div>
</div>
<div class="result-wait-time">
<div class="result-wait-time-text0">Current wait time</div>
<div class="result-wait-time-number">89</div>
<div class="result-wait-time-text1">minute(s)</div>
</div>
</div>
<div class="link-wrapper">
<a href="/restaurants/<%= result[:id] %>" class="result-button">Sign Waitlist</a>
<a href="#" class="result-button">See More</a>
</div>
</div>
| <div class="result-block">
<div class="result-wrapper">
<div class="result-img"><img src="<%=result[:image]%>"></div>
<div class="result-info">
<div><label>Restaurant Name</label></div>
<div><%=result[:name]%></div>
<div><label>Category</label></div>
<div>
<%=result[:categories]%>
</div>
<div><label>Location</label></div>
<div><%=result[:location][0]%></div>
<div><%=result[:location][2]%></div>
<div>
<label>Rating</label>
<img src="<%=result[:rating]%>">
</div>
</div>
<div class="result-wait-time">
<div class="result-wait-time-text0">Current wait time</div>
<div class="result-wait-time-number">89</div>
<div class="result-wait-time-text1">minute(s)</div>
</div>
</div>
<div class="link-wrapper">
<a href="#" class="result-button">Sign Waitlist</a>
<a href="/restaurants/<%= result[:id] %>" class="result-button">See More</a>
</div>
</div>
| Switch button href to proper route | Switch button href to proper route
| HTML+ERB | mit | jchang2014/waitless,erictflores/waitless,jengjao515/WaitLess,jchang2014/waitless,erictflores/waitless,erictflores/waitless,jengjao515/WaitLess,jchang2014/waitless,jengjao515/WaitLess | html+erb | ## Code Before:
<div class="result-block">
<div class="result-wrapper">
<div class="result-img"><img src="<%=result[:image]%>"></div>
<div class="result-info">
<div><label>Restaurant Name</label></div>
<div><%=result[:name]%></div>
<div><label>Category</label></div>
<div>
<%=result[:categories]%>
</div>
<div><label>Location</label></div>
<div><%=result[:location][0]%></div>
<div><%=result[:location][2]%></div>
<div>
<label>Rating</label>
<img src="<%=result[:rating]%>">
</div>
</div>
<div class="result-wait-time">
<div class="result-wait-time-text0">Current wait time</div>
<div class="result-wait-time-number">89</div>
<div class="result-wait-time-text1">minute(s)</div>
</div>
</div>
<div class="link-wrapper">
<a href="/restaurants/<%= result[:id] %>" class="result-button">Sign Waitlist</a>
<a href="#" class="result-button">See More</a>
</div>
</div>
## Instruction:
Switch button href to proper route
## Code After:
<div class="result-block">
<div class="result-wrapper">
<div class="result-img"><img src="<%=result[:image]%>"></div>
<div class="result-info">
<div><label>Restaurant Name</label></div>
<div><%=result[:name]%></div>
<div><label>Category</label></div>
<div>
<%=result[:categories]%>
</div>
<div><label>Location</label></div>
<div><%=result[:location][0]%></div>
<div><%=result[:location][2]%></div>
<div>
<label>Rating</label>
<img src="<%=result[:rating]%>">
</div>
</div>
<div class="result-wait-time">
<div class="result-wait-time-text0">Current wait time</div>
<div class="result-wait-time-number">89</div>
<div class="result-wait-time-text1">minute(s)</div>
</div>
</div>
<div class="link-wrapper">
<a href="#" class="result-button">Sign Waitlist</a>
<a href="/restaurants/<%= result[:id] %>" class="result-button">See More</a>
</div>
</div>
| <div class="result-block">
<div class="result-wrapper">
<div class="result-img"><img src="<%=result[:image]%>"></div>
<div class="result-info">
<div><label>Restaurant Name</label></div>
<div><%=result[:name]%></div>
<div><label>Category</label></div>
<div>
<%=result[:categories]%>
</div>
<div><label>Location</label></div>
<div><%=result[:location][0]%></div>
<div><%=result[:location][2]%></div>
<div>
<label>Rating</label>
<img src="<%=result[:rating]%>">
</div>
</div>
<div class="result-wait-time">
<div class="result-wait-time-text0">Current wait time</div>
<div class="result-wait-time-number">89</div>
<div class="result-wait-time-text1">minute(s)</div>
</div>
</div>
<div class="link-wrapper">
+ <a href="#" class="result-button">Sign Waitlist</a>
- <a href="/restaurants/<%= result[:id] %>" class="result-button">Sign Waitlist</a>
? ^^^ ^^^^^^^^
+ <a href="/restaurants/<%= result[:id] %>" class="result-button">See More</a>
? ^^ ^^^^
- <a href="#" class="result-button">See More</a>
</div>
</div> | 4 | 0.105263 | 2 | 2 |
04832c28db745e61e665e71cf7c8d2c58dc7d087 | src/scripts/introspectCofactsApi.js | src/scripts/introspectCofactsApi.js | import path from 'path';
import fs from 'fs';
import { introspectSchema } from '@graphql-tools/wrap';
import { printSchema } from 'graphql/utilities';
import executor from '../graphql/cofactsSchemaExecutor';
const OUTPUT = path.join(__dirname, `../../data/cofacts-api.graphql`);
introspectSchema(executor)
.then(schema => {
const sdl = printSchema(schema);
fs.writeFileSync(
OUTPUT,
'# Generated from `npm run cofactsapi`\n\n' + sdl,
{
encoding: 'utf-8',
}
);
console.log('Cofacts API schema written to', OUTPUT);
})
.catch(console.error);
| import path from 'path';
import fs from 'fs';
import { introspectSchema } from '@graphql-tools/wrap';
import { printSchema } from 'graphql/utilities';
import executor from '../graphql/cofactsSchemaExecutor';
const PATH_PREFIX = '../../data';
fs.mkdirSync(path.join(__dirname, PATH_PREFIX));
const OUTPUT = path.join(__dirname, `${PATH_PREFIX}/cofacts-api.graphql`);
introspectSchema(executor)
.then(schema => {
const sdl = printSchema(schema);
fs.writeFileSync(
OUTPUT,
'# Generated from `npm run cofactsapi`\n\n' + sdl,
{
encoding: 'utf-8',
}
);
console.log('Cofacts API schema written to', OUTPUT);
})
.catch(console.error);
| Make sure data directory exist when printing server schema | Make sure data directory exist when printing server schema
| JavaScript | mit | cofacts/rumors-line-bot,cofacts/rumors-line-bot,cofacts/rumors-line-bot | javascript | ## Code Before:
import path from 'path';
import fs from 'fs';
import { introspectSchema } from '@graphql-tools/wrap';
import { printSchema } from 'graphql/utilities';
import executor from '../graphql/cofactsSchemaExecutor';
const OUTPUT = path.join(__dirname, `../../data/cofacts-api.graphql`);
introspectSchema(executor)
.then(schema => {
const sdl = printSchema(schema);
fs.writeFileSync(
OUTPUT,
'# Generated from `npm run cofactsapi`\n\n' + sdl,
{
encoding: 'utf-8',
}
);
console.log('Cofacts API schema written to', OUTPUT);
})
.catch(console.error);
## Instruction:
Make sure data directory exist when printing server schema
## Code After:
import path from 'path';
import fs from 'fs';
import { introspectSchema } from '@graphql-tools/wrap';
import { printSchema } from 'graphql/utilities';
import executor from '../graphql/cofactsSchemaExecutor';
const PATH_PREFIX = '../../data';
fs.mkdirSync(path.join(__dirname, PATH_PREFIX));
const OUTPUT = path.join(__dirname, `${PATH_PREFIX}/cofacts-api.graphql`);
introspectSchema(executor)
.then(schema => {
const sdl = printSchema(schema);
fs.writeFileSync(
OUTPUT,
'# Generated from `npm run cofactsapi`\n\n' + sdl,
{
encoding: 'utf-8',
}
);
console.log('Cofacts API schema written to', OUTPUT);
})
.catch(console.error);
| import path from 'path';
import fs from 'fs';
import { introspectSchema } from '@graphql-tools/wrap';
import { printSchema } from 'graphql/utilities';
import executor from '../graphql/cofactsSchemaExecutor';
+ const PATH_PREFIX = '../../data';
+ fs.mkdirSync(path.join(__dirname, PATH_PREFIX));
- const OUTPUT = path.join(__dirname, `../../data/cofacts-api.graphql`);
? ^^^^^^^^^^
+ const OUTPUT = path.join(__dirname, `${PATH_PREFIX}/cofacts-api.graphql`);
? ^^^^^^^^^^^^^^
introspectSchema(executor)
.then(schema => {
const sdl = printSchema(schema);
fs.writeFileSync(
OUTPUT,
'# Generated from `npm run cofactsapi`\n\n' + sdl,
{
encoding: 'utf-8',
}
);
console.log('Cofacts API schema written to', OUTPUT);
})
.catch(console.error); | 4 | 0.181818 | 3 | 1 |
f67fad67b1daebf08dbcbf7a25c696df5bdfbab3 | addon/utils/build-url.js | addon/utils/build-url.js | import Ember from 'ember';
const { assert } = Ember;
export default function buildOperationUrl(record, opPath, requestType, intance=true) {
assert('You must provide a path for instanceOp', opPath);
const modelName = record.constructor.modelName || record.constructor.typeKey;
let adapter = record.store.adapterFor(modelName);
let path = opPath;
let baseUrl = adapter.buildURL(modelName, intance ? record.get('id') : null, requestType);
if (baseUrl.charAt(baseUrl.length - 1) === '/') {
return `${baseUrl}${path}`;
} else {
return `${baseUrl}/${path}`;
}
}
| import Ember from 'ember';
const { assert } = Ember;
export default function buildOperationUrl(record, opPath, requestType, instance=true) {
assert('You must provide a path for instanceOp', opPath);
const modelName = record.constructor.modelName || record.constructor.typeKey;
let adapter = record.store.adapterFor(modelName);
let path = opPath;
let snapshot = record._createSnapshot();
let baseUrl = adapter.buildURL(modelName, instance ? record.get('id') : null, snapshot, requestType);
if (baseUrl.charAt(baseUrl.length - 1) === '/') {
return `${baseUrl}${path}`;
} else {
return `${baseUrl}/${path}`;
}
}
| Fix buildURL to match ember-data signature. | Fix buildURL to match ember-data signature.
This was missing the snapshot argument, which could screw up anyone
overriding buildUrl.
| JavaScript | mit | truenorth/ember-api-actions,truenorth/ember-api-actions,mike-north/ember-api-actions,mike-north/ember-api-actions,mike-north/ember-api-actions,mike-north/ember-api-actions | javascript | ## Code Before:
import Ember from 'ember';
const { assert } = Ember;
export default function buildOperationUrl(record, opPath, requestType, intance=true) {
assert('You must provide a path for instanceOp', opPath);
const modelName = record.constructor.modelName || record.constructor.typeKey;
let adapter = record.store.adapterFor(modelName);
let path = opPath;
let baseUrl = adapter.buildURL(modelName, intance ? record.get('id') : null, requestType);
if (baseUrl.charAt(baseUrl.length - 1) === '/') {
return `${baseUrl}${path}`;
} else {
return `${baseUrl}/${path}`;
}
}
## Instruction:
Fix buildURL to match ember-data signature.
This was missing the snapshot argument, which could screw up anyone
overriding buildUrl.
## Code After:
import Ember from 'ember';
const { assert } = Ember;
export default function buildOperationUrl(record, opPath, requestType, instance=true) {
assert('You must provide a path for instanceOp', opPath);
const modelName = record.constructor.modelName || record.constructor.typeKey;
let adapter = record.store.adapterFor(modelName);
let path = opPath;
let snapshot = record._createSnapshot();
let baseUrl = adapter.buildURL(modelName, instance ? record.get('id') : null, snapshot, requestType);
if (baseUrl.charAt(baseUrl.length - 1) === '/') {
return `${baseUrl}${path}`;
} else {
return `${baseUrl}/${path}`;
}
}
| import Ember from 'ember';
const { assert } = Ember;
- export default function buildOperationUrl(record, opPath, requestType, intance=true) {
+ export default function buildOperationUrl(record, opPath, requestType, instance=true) {
? +
assert('You must provide a path for instanceOp', opPath);
const modelName = record.constructor.modelName || record.constructor.typeKey;
let adapter = record.store.adapterFor(modelName);
let path = opPath;
+ let snapshot = record._createSnapshot();
- let baseUrl = adapter.buildURL(modelName, intance ? record.get('id') : null, requestType);
+ let baseUrl = adapter.buildURL(modelName, instance ? record.get('id') : null, snapshot, requestType);
? + ++++++++++
if (baseUrl.charAt(baseUrl.length - 1) === '/') {
return `${baseUrl}${path}`;
} else {
return `${baseUrl}/${path}`;
}
} | 5 | 0.294118 | 3 | 2 |
c216a1b9cef21e066d9d57434092e7cc542f47ae | .travis.yml | .travis.yml |
before_script:
- ./test/setup/setup.sh
env:
global:
- BUILD_DIR="$TRAVIS_BUILD_DIR"
- SERVER_SPECIFIC_TESTS="test/tests.js"
language: node_js
matrix:
include:
# Test for Apache 2.2.x
- env:
- APACHE_VERSION="2.2.x"
node_js: "0.10"
script:
- sudo service apache2 restart
- npm test
# Test for Apache 2.4.x
- env:
- APACHE_VERSION="2.4.x"
node_js: "0.10"
script:
- sudo /usr/local/apache2/bin/apachectl start
- npm test
|
before_script:
- ./test/setup/setup.sh
env:
global:
- BUILD_DIR="$TRAVIS_BUILD_DIR"
- SERVER_SPECIFIC_TESTS="test/tests.js"
git:
depth: 10
language: node_js
matrix:
include:
# Test for Apache 2.2.x
- env:
- APACHE_VERSION="2.2.x"
node_js: "0.10"
script:
- sudo service apache2 restart
- npm test
# Test for Apache 2.4.x
- env:
- APACHE_VERSION="2.4.x"
node_js: "0.10"
script:
- sudo /usr/local/apache2/bin/apachectl start
- npm test
| Make Travis CI limit the Git clone depth even more | Make Travis CI limit the Git clone depth even more
In order to speed up the tests a little bit more, this commit informs
Travis CI to limit the Git clone depth to 10 commits (default is 50).
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
e.g.:
$ time g clone --depth 10 git@github.com:h5bp/server-configs-apache.git
...
real 0m2.549s
user 0m0.068s
sys 0m0.048s
$ time g clone --depth 50 git@github.com:h5bp/server-configs-apache.git
...
real 0m3.139s
user 0m0.128s
sys 0m0.024s
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
Ref: http://git-scm.com/docs/git-clone
https://twitter.com/travisci/status/288390896339267584
| YAML | mit | StephanieMak/server-configs-apache,blackmoral/server-configs-apache,Gillespie59/server-configs-apache,StephanieMak/server-configs-apache,voku/server-configs-apache,voku/server-configs-apache,atyenoria/server-configs-apache,johnnyicarus/server-configs-apache,blackmoral/server-configs-apache,walterebert/server-configs-apache,johnnyicarus/server-configs-apache,atyenoria/server-configs-apache,h5bp/server-configs-apache,walterebert/server-configs-apache,sengkoil/server-configs-apache,Gillespie59/server-configs-apache,sengkoil/server-configs-apache | yaml | ## Code Before:
before_script:
- ./test/setup/setup.sh
env:
global:
- BUILD_DIR="$TRAVIS_BUILD_DIR"
- SERVER_SPECIFIC_TESTS="test/tests.js"
language: node_js
matrix:
include:
# Test for Apache 2.2.x
- env:
- APACHE_VERSION="2.2.x"
node_js: "0.10"
script:
- sudo service apache2 restart
- npm test
# Test for Apache 2.4.x
- env:
- APACHE_VERSION="2.4.x"
node_js: "0.10"
script:
- sudo /usr/local/apache2/bin/apachectl start
- npm test
## Instruction:
Make Travis CI limit the Git clone depth even more
In order to speed up the tests a little bit more, this commit informs
Travis CI to limit the Git clone depth to 10 commits (default is 50).
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
e.g.:
$ time g clone --depth 10 git@github.com:h5bp/server-configs-apache.git
...
real 0m2.549s
user 0m0.068s
sys 0m0.048s
$ time g clone --depth 50 git@github.com:h5bp/server-configs-apache.git
...
real 0m3.139s
user 0m0.128s
sys 0m0.024s
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
Ref: http://git-scm.com/docs/git-clone
https://twitter.com/travisci/status/288390896339267584
## Code After:
before_script:
- ./test/setup/setup.sh
env:
global:
- BUILD_DIR="$TRAVIS_BUILD_DIR"
- SERVER_SPECIFIC_TESTS="test/tests.js"
git:
depth: 10
language: node_js
matrix:
include:
# Test for Apache 2.2.x
- env:
- APACHE_VERSION="2.2.x"
node_js: "0.10"
script:
- sudo service apache2 restart
- npm test
# Test for Apache 2.4.x
- env:
- APACHE_VERSION="2.4.x"
node_js: "0.10"
script:
- sudo /usr/local/apache2/bin/apachectl start
- npm test
|
before_script:
- ./test/setup/setup.sh
env:
global:
- BUILD_DIR="$TRAVIS_BUILD_DIR"
- SERVER_SPECIFIC_TESTS="test/tests.js"
+
+ git:
+ depth: 10
language: node_js
matrix:
include:
# Test for Apache 2.2.x
- env:
- APACHE_VERSION="2.2.x"
node_js: "0.10"
script:
- sudo service apache2 restart
- npm test
# Test for Apache 2.4.x
- env:
- APACHE_VERSION="2.4.x"
node_js: "0.10"
script:
- sudo /usr/local/apache2/bin/apachectl start
- npm test | 3 | 0.103448 | 3 | 0 |
711c19a6f817413f252e96367e8f7d7ee49802c0 | .github/workflows/deploy-pr.yml | .github/workflows/deploy-pr.yml | name: Deploy Pull Request
on:
pull_request_target:
types: [labeled]
env:
COVERAGE: false
jobs:
deploy:
name: Deploy PR Preview
runs-on: ubuntu-latest
if: contains(github.event.pull_request.labels.*.name, 'safe to deploy')
steps:
- uses: actions/checkout@v2
- name: Use Node.js 16
uses: actions/setup-node@v2
with:
node-version: 16
cache: 'npm'
- name: install dependencies
run: npm ci
- name: Ember CLI Deploy
run: node_modules/.bin/ember deploy pr-preview
env:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
GITHUB_PR_NUMBER: ${{ github.event.number }}
comment:
runs-on: ubuntu-latest
needs: deploy
steps:
- uses: actions/github-script@v5
with:
github-token: ${{secrets.GITHUB_TOKEN}}
script: |
github.rest.issues.createComment({
issue_number: context.issue.number,
owner: context.repo.owner,
repo: context.repo.repo,
body: "Staging build deployed! It can be accessed with `bin/console ilios:update-frontend --staging-build --at-version=pr_preview-${{ github.event.number }}`"
}) | name: Deploy Pull Request
on:
pull_request_target:
types: [labeled,opened,reopened,synchronize]
env:
COVERAGE: false
jobs:
deploy:
name: Deploy PR Preview
runs-on: ubuntu-latest
if: contains(github.event.pull_request.labels.*.name, 'safe to deploy')
steps:
- uses: actions/checkout@v2
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Use Node.js 16
uses: actions/setup-node@v2
with:
node-version: 16
cache: 'npm'
- name: install dependencies
run: npm ci
- name: Ember CLI Deploy
run: node_modules/.bin/ember deploy pr-preview
env:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
GITHUB_PR_NUMBER: ${{ github.event.number }}
comment:
runs-on: ubuntu-latest
needs: deploy
steps:
- uses: actions/github-script@v5
with:
github-token: ${{secrets.GITHUB_TOKEN}}
script: |
github.rest.issues.createComment({
issue_number: context.issue.number,
owner: context.repo.owner,
repo: context.repo.repo,
body: "Staging build deployed! It can be accessed with `bin/console ilios:update-frontend --staging-build --at-version=pr_preview-${{ github.event.number }}`"
}) | Fix pull request preview action | Fix pull request preview action
The `pull_request_target` event loads to a checkout of `master` and not
the PR commit. Since we're only running this action when a label has
been added we can go ahead and checkout the PR ref and run there. I also
added some more events so deployed PRs will get re-deployed when they
are changed.
| YAML | mit | ilios/frontend,dartajax/frontend,jrjohnson/frontend,jrjohnson/frontend,dartajax/frontend,ilios/frontend | yaml | ## Code Before:
name: Deploy Pull Request
on:
pull_request_target:
types: [labeled]
env:
COVERAGE: false
jobs:
deploy:
name: Deploy PR Preview
runs-on: ubuntu-latest
if: contains(github.event.pull_request.labels.*.name, 'safe to deploy')
steps:
- uses: actions/checkout@v2
- name: Use Node.js 16
uses: actions/setup-node@v2
with:
node-version: 16
cache: 'npm'
- name: install dependencies
run: npm ci
- name: Ember CLI Deploy
run: node_modules/.bin/ember deploy pr-preview
env:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
GITHUB_PR_NUMBER: ${{ github.event.number }}
comment:
runs-on: ubuntu-latest
needs: deploy
steps:
- uses: actions/github-script@v5
with:
github-token: ${{secrets.GITHUB_TOKEN}}
script: |
github.rest.issues.createComment({
issue_number: context.issue.number,
owner: context.repo.owner,
repo: context.repo.repo,
body: "Staging build deployed! It can be accessed with `bin/console ilios:update-frontend --staging-build --at-version=pr_preview-${{ github.event.number }}`"
})
## Instruction:
Fix pull request preview action
The `pull_request_target` event loads to a checkout of `master` and not
the PR commit. Since we're only running this action when a label has
been added we can go ahead and checkout the PR ref and run there. I also
added some more events so deployed PRs will get re-deployed when they
are changed.
## Code After:
name: Deploy Pull Request
on:
pull_request_target:
types: [labeled,opened,reopened,synchronize]
env:
COVERAGE: false
jobs:
deploy:
name: Deploy PR Preview
runs-on: ubuntu-latest
if: contains(github.event.pull_request.labels.*.name, 'safe to deploy')
steps:
- uses: actions/checkout@v2
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Use Node.js 16
uses: actions/setup-node@v2
with:
node-version: 16
cache: 'npm'
- name: install dependencies
run: npm ci
- name: Ember CLI Deploy
run: node_modules/.bin/ember deploy pr-preview
env:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
GITHUB_PR_NUMBER: ${{ github.event.number }}
comment:
runs-on: ubuntu-latest
needs: deploy
steps:
- uses: actions/github-script@v5
with:
github-token: ${{secrets.GITHUB_TOKEN}}
script: |
github.rest.issues.createComment({
issue_number: context.issue.number,
owner: context.repo.owner,
repo: context.repo.repo,
body: "Staging build deployed! It can be accessed with `bin/console ilios:update-frontend --staging-build --at-version=pr_preview-${{ github.event.number }}`"
}) | name: Deploy Pull Request
on:
pull_request_target:
- types: [labeled]
+ types: [labeled,opened,reopened,synchronize]
env:
COVERAGE: false
jobs:
deploy:
name: Deploy PR Preview
runs-on: ubuntu-latest
if: contains(github.event.pull_request.labels.*.name, 'safe to deploy')
steps:
- uses: actions/checkout@v2
+ with:
+ ref: ${{ github.event.pull_request.head.sha }}
- name: Use Node.js 16
uses: actions/setup-node@v2
with:
node-version: 16
cache: 'npm'
- name: install dependencies
run: npm ci
- name: Ember CLI Deploy
run: node_modules/.bin/ember deploy pr-preview
env:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
GITHUB_PR_NUMBER: ${{ github.event.number }}
comment:
runs-on: ubuntu-latest
needs: deploy
steps:
- uses: actions/github-script@v5
with:
github-token: ${{secrets.GITHUB_TOKEN}}
script: |
github.rest.issues.createComment({
issue_number: context.issue.number,
owner: context.repo.owner,
repo: context.repo.repo,
body: "Staging build deployed! It can be accessed with `bin/console ilios:update-frontend --staging-build --at-version=pr_preview-${{ github.event.number }}`"
}) | 4 | 0.093023 | 3 | 1 |
a231d73fcac6c9b2e8b9976a52070b98153bf3df | lib/simulation/openfisca/mapping/reverse.js | lib/simulation/openfisca/mapping/reverse.js | var _ = require('lodash');
var periods = require('./periods');
var PRESTATIONS = require('../prestations');
module.exports = function reverseMap(openFiscaFamille, date, injectedRessources) {
var period = periods.map(date);
// Don't show prestations that have been injected by the user, and not calculated by the simulator
_.forEach(injectedRessources, function(resource) {
delete PRESTATIONS[resource];
});
return _.mapValues(PRESTATIONS, function(format, prestationName) {
var type = format.type,
computedPrestation = openFiscaFamille[prestationName],
result = computedPrestation[period];
var uncomputabilityReason = openFiscaFamille[prestationName + '_non_calculable'] && openFiscaFamille[prestationName + '_non_calculable'][period];
if (uncomputabilityReason) {
return uncomputabilityReason;
}
if (format.montantAnnuel) {
result *= 12;
}
if (type == Number) {
result = Number(result.toFixed(2));
}
return result;
});
};
| var _ = require('lodash');
var periods = require('./periods');
var PRESTATIONS = require('../prestations');
module.exports = function reverseMap(openFiscaFamille, date, injectedRessources) {
var period = periods.map(date);
var prestationsToDisplay = _.cloneDeep(PRESTATIONS);
// Don't show prestations that have been injected by the user, and not calculated by the simulator
_.forEach(injectedRessources, function(resource) {
delete prestationsToDisplay[resource];
});
return _.mapValues(prestationsToDisplay, function(format, prestationName) {
var type = format.type,
computedPrestation = openFiscaFamille[prestationName],
result = computedPrestation[period];
var uncomputabilityReason = openFiscaFamille[prestationName + '_non_calculable'] && openFiscaFamille[prestationName + '_non_calculable'][period];
if (uncomputabilityReason) {
return uncomputabilityReason;
}
if (format.montantAnnuel) {
result *= 12;
}
if (type == Number) {
result = Number(result.toFixed(2));
}
return result;
});
};
| Fix bug about not displaying injected prestations | Fix bug about not displaying injected prestations
In previous implem, if a prestation was injected in one test, it would never be displayed in other tests
| JavaScript | agpl-3.0 | sgmap/mes-aides-api | javascript | ## Code Before:
var _ = require('lodash');
var periods = require('./periods');
var PRESTATIONS = require('../prestations');
module.exports = function reverseMap(openFiscaFamille, date, injectedRessources) {
var period = periods.map(date);
// Don't show prestations that have been injected by the user, and not calculated by the simulator
_.forEach(injectedRessources, function(resource) {
delete PRESTATIONS[resource];
});
return _.mapValues(PRESTATIONS, function(format, prestationName) {
var type = format.type,
computedPrestation = openFiscaFamille[prestationName],
result = computedPrestation[period];
var uncomputabilityReason = openFiscaFamille[prestationName + '_non_calculable'] && openFiscaFamille[prestationName + '_non_calculable'][period];
if (uncomputabilityReason) {
return uncomputabilityReason;
}
if (format.montantAnnuel) {
result *= 12;
}
if (type == Number) {
result = Number(result.toFixed(2));
}
return result;
});
};
## Instruction:
Fix bug about not displaying injected prestations
In previous implem, if a prestation was injected in one test, it would never be displayed in other tests
## Code After:
var _ = require('lodash');
var periods = require('./periods');
var PRESTATIONS = require('../prestations');
module.exports = function reverseMap(openFiscaFamille, date, injectedRessources) {
var period = periods.map(date);
var prestationsToDisplay = _.cloneDeep(PRESTATIONS);
// Don't show prestations that have been injected by the user, and not calculated by the simulator
_.forEach(injectedRessources, function(resource) {
delete prestationsToDisplay[resource];
});
return _.mapValues(prestationsToDisplay, function(format, prestationName) {
var type = format.type,
computedPrestation = openFiscaFamille[prestationName],
result = computedPrestation[period];
var uncomputabilityReason = openFiscaFamille[prestationName + '_non_calculable'] && openFiscaFamille[prestationName + '_non_calculable'][period];
if (uncomputabilityReason) {
return uncomputabilityReason;
}
if (format.montantAnnuel) {
result *= 12;
}
if (type == Number) {
result = Number(result.toFixed(2));
}
return result;
});
};
| var _ = require('lodash');
var periods = require('./periods');
var PRESTATIONS = require('../prestations');
module.exports = function reverseMap(openFiscaFamille, date, injectedRessources) {
var period = periods.map(date);
+ var prestationsToDisplay = _.cloneDeep(PRESTATIONS);
// Don't show prestations that have been injected by the user, and not calculated by the simulator
_.forEach(injectedRessources, function(resource) {
- delete PRESTATIONS[resource];
+ delete prestationsToDisplay[resource];
});
- return _.mapValues(PRESTATIONS, function(format, prestationName) {
? ^^^^ ^^^^^^
+ return _.mapValues(prestationsToDisplay, function(format, prestationName) {
? ^^^^^^^^^^^ ^^^^^^^^
var type = format.type,
computedPrestation = openFiscaFamille[prestationName],
result = computedPrestation[period];
var uncomputabilityReason = openFiscaFamille[prestationName + '_non_calculable'] && openFiscaFamille[prestationName + '_non_calculable'][period];
if (uncomputabilityReason) {
return uncomputabilityReason;
}
if (format.montantAnnuel) {
result *= 12;
}
if (type == Number) {
result = Number(result.toFixed(2));
}
return result;
});
}; | 5 | 0.138889 | 3 | 2 |
23e2592bfeab73b256d16fd524773da846263af5 | .travis.yml | .travis.yml | language: python
python:
- 3.3
- 2.7
before_install:
- git clone https://github.com/anchor/libmarquise.git ../libmarquise/
- cd ../libmarquise/
- autoreconf -i
- ./configure
- make
- sudo make install
- sudo su -c "echo '/usr/local/lib' > /etc/ld.so.conf.d/local.conf"
- sudo ldconfig
- cd -
# command to run tests
script:
- make test
- make test-coverage-of-main-in-testsuite
| language: python
python:
- 3.4
- 3.3
- 3.2
- 2.7
- 2.6
before_install:
- git clone https://github.com/anchor/libmarquise.git ../libmarquise/
- cd ../libmarquise/
- autoreconf -i
- ./configure
- make
- sudo make install
- sudo su -c "echo '/usr/local/lib' > /etc/ld.so.conf.d/local.conf"
- sudo ldconfig
- cd -
# command to run tests
script:
- make test
- make test-coverage-of-main-in-testsuite
| Test a healthy range of Python versions | Test a healthy range of Python versions
| YAML | mit | anchor/pymarquise,anchor/pymarquise | yaml | ## Code Before:
language: python
python:
- 3.3
- 2.7
before_install:
- git clone https://github.com/anchor/libmarquise.git ../libmarquise/
- cd ../libmarquise/
- autoreconf -i
- ./configure
- make
- sudo make install
- sudo su -c "echo '/usr/local/lib' > /etc/ld.so.conf.d/local.conf"
- sudo ldconfig
- cd -
# command to run tests
script:
- make test
- make test-coverage-of-main-in-testsuite
## Instruction:
Test a healthy range of Python versions
## Code After:
language: python
python:
- 3.4
- 3.3
- 3.2
- 2.7
- 2.6
before_install:
- git clone https://github.com/anchor/libmarquise.git ../libmarquise/
- cd ../libmarquise/
- autoreconf -i
- ./configure
- make
- sudo make install
- sudo su -c "echo '/usr/local/lib' > /etc/ld.so.conf.d/local.conf"
- sudo ldconfig
- cd -
# command to run tests
script:
- make test
- make test-coverage-of-main-in-testsuite
| language: python
python:
+ - 3.4
- 3.3
+ - 3.2
- 2.7
+ - 2.6
before_install:
- git clone https://github.com/anchor/libmarquise.git ../libmarquise/
- cd ../libmarquise/
- autoreconf -i
- ./configure
- make
- sudo make install
- sudo su -c "echo '/usr/local/lib' > /etc/ld.so.conf.d/local.conf"
- sudo ldconfig
- cd -
# command to run tests
script:
- make test
- make test-coverage-of-main-in-testsuite | 3 | 0.142857 | 3 | 0 |
57c6de62ae64f191a5d50778ca06d4f4c73aadea | qr.php | qr.php | <?php
class qr extends Script
{
public function run()
{
return $this->send('https://chart.googleapis.com/chart?chs=547x547&cht=qr&chl=' . urlencode($this->matches[1]), 'image');
}
}
| <?php
class qr extends Script
{
protected $helpMessage = "'qr STRING'\n";
protected $description = 'Returns a QR-code representing the given string';
public function run()
{
return $this->send('https://chart.googleapis.com/chart?chs=547x547&cht=qr&chl=' . urlencode($this->matches[1]), 'image');
}
}
| Complete the information for the help-plugin | Complete the information for the help-plugin
| PHP | mit | Detlefff/Detlefff-QR | php | ## Code Before:
<?php
class qr extends Script
{
public function run()
{
return $this->send('https://chart.googleapis.com/chart?chs=547x547&cht=qr&chl=' . urlencode($this->matches[1]), 'image');
}
}
## Instruction:
Complete the information for the help-plugin
## Code After:
<?php
class qr extends Script
{
protected $helpMessage = "'qr STRING'\n";
protected $description = 'Returns a QR-code representing the given string';
public function run()
{
return $this->send('https://chart.googleapis.com/chart?chs=547x547&cht=qr&chl=' . urlencode($this->matches[1]), 'image');
}
}
| <?php
class qr extends Script
{
+ protected $helpMessage = "'qr STRING'\n";
+ protected $description = 'Returns a QR-code representing the given string';
+
public function run()
{
return $this->send('https://chart.googleapis.com/chart?chs=547x547&cht=qr&chl=' . urlencode($this->matches[1]), 'image');
}
} | 3 | 0.375 | 3 | 0 |
20df02ce9cce654c4381c8bbdf0972cce488dcb7 | src/data/ingredients.json | src/data/ingredients.json | [
"Thunfisch",
"Sardellen",
"Meeresfrüchte",
"Krabben",
"Zwiebel",
"Kidney-Bohnen",
"Fetakäse",
"Silberzwiebeln",
"Pepperoni (scharf)",
"Pepperoni (mild)",
"frischer Paprika",
"Rucola",
"Mais",
"Zucchini",
"Spargel",
"Artischocken",
"frische Pilze",
"Auberginen",
"Spinat",
"Broccoli",
"Ananas",
"Oliven",
"Kapern",
"Tomaten",
"Ei",
"Hackfleischsoße",
"Salami",
"ital. Salami",
"Hackfleischklöse",
"Vorderschinken",
"Speck",
"Hähnchenfleisch",
"türk. Salami"
] | [
"Thunfisch",
"Sardellen",
"Meeresfrüchte",
"Krabben",
"Zwiebel",
"Kidney-Bohnen",
"Fetakäse",
"Silberzwiebeln",
"Pepperoni (scharf)",
"Pepperoni (mild)",
"frischer Paprika",
"Rucola",
"Mais",
"Zucchini",
"Spargel",
"Artischocken",
"frische Pilze",
"Auberginen",
"Spinat",
"Broccoli",
"Ananas",
"Oliven",
"Kapern",
"Tomaten",
"Ei",
"Hackfleischsoße",
"Salami",
"ital. Salami",
"Hackfleischklöse",
"Vorderschinken",
"Speck",
"Hähnchenfleisch",
"türk. Salami",
"Sucuk",
"Maultaschen"
] | Add Sucuk and Maultaschen as ingredient | Add Sucuk and Maultaschen as ingredient
| JSON | mit | spethso/PizzaCalculator,spethso/PizzaCalculator | json | ## Code Before:
[
"Thunfisch",
"Sardellen",
"Meeresfrüchte",
"Krabben",
"Zwiebel",
"Kidney-Bohnen",
"Fetakäse",
"Silberzwiebeln",
"Pepperoni (scharf)",
"Pepperoni (mild)",
"frischer Paprika",
"Rucola",
"Mais",
"Zucchini",
"Spargel",
"Artischocken",
"frische Pilze",
"Auberginen",
"Spinat",
"Broccoli",
"Ananas",
"Oliven",
"Kapern",
"Tomaten",
"Ei",
"Hackfleischsoße",
"Salami",
"ital. Salami",
"Hackfleischklöse",
"Vorderschinken",
"Speck",
"Hähnchenfleisch",
"türk. Salami"
]
## Instruction:
Add Sucuk and Maultaschen as ingredient
## Code After:
[
"Thunfisch",
"Sardellen",
"Meeresfrüchte",
"Krabben",
"Zwiebel",
"Kidney-Bohnen",
"Fetakäse",
"Silberzwiebeln",
"Pepperoni (scharf)",
"Pepperoni (mild)",
"frischer Paprika",
"Rucola",
"Mais",
"Zucchini",
"Spargel",
"Artischocken",
"frische Pilze",
"Auberginen",
"Spinat",
"Broccoli",
"Ananas",
"Oliven",
"Kapern",
"Tomaten",
"Ei",
"Hackfleischsoße",
"Salami",
"ital. Salami",
"Hackfleischklöse",
"Vorderschinken",
"Speck",
"Hähnchenfleisch",
"türk. Salami",
"Sucuk",
"Maultaschen"
] | [
"Thunfisch",
"Sardellen",
"Meeresfrüchte",
"Krabben",
"Zwiebel",
"Kidney-Bohnen",
"Fetakäse",
"Silberzwiebeln",
"Pepperoni (scharf)",
"Pepperoni (mild)",
"frischer Paprika",
"Rucola",
"Mais",
"Zucchini",
"Spargel",
"Artischocken",
"frische Pilze",
"Auberginen",
"Spinat",
"Broccoli",
"Ananas",
"Oliven",
"Kapern",
"Tomaten",
"Ei",
"Hackfleischsoße",
"Salami",
"ital. Salami",
"Hackfleischklöse",
"Vorderschinken",
"Speck",
"Hähnchenfleisch",
- "türk. Salami"
+ "türk. Salami",
? +
+ "Sucuk",
+ "Maultaschen"
] | 4 | 0.114286 | 3 | 1 |
cc078aadc25489ef5a46eb171e3d844d43924df4 | corehq/apps/app_manager/static/app_manager/js/settings/translations.js | corehq/apps/app_manager/static/app_manager/js/settings/translations.js | hqDefine("app_manager/js/settings/translations", function () {
var appTranslationsModel = function (options) {
hqImport("hqwebapp/js/assert_properties").assertRequired(options, ['baseUrl', 'lang']);
var self = {};
self.file = ko.observable();
self.lang = ko.observable(options.lang);
self.url = ko.computed(function () {
return options.baseUrl + "?lang=" + self.lang();
});
return self;
};
$(function () {
// Bulk CommCare translations
var $commcareForm = $("#bulk_ui_translation_upload_form");
if ($commcareForm.length) {
$commcareForm.koApplyBindings({
file: ko.observable(),
});
}
// Bulk application translations
var $translationsPanel = $("#bulk-application-translations");
if ($translationsPanel.length) {
$translationsPanel.koApplyBindings(appTranslationsModel({
baseUrl: $translationsPanel.find("#download_link").attr("href"),
lang: $translationsPanel.find("select").val() || '',
}));
}
});
});
| hqDefine("app_manager/js/settings/translations", function () {
var appTranslationsModel = function (options) {
hqImport("hqwebapp/js/assert_properties").assertRequired(options, ['baseUrl', 'lang', 'skipBlacklisted']);
var self = {};
self.file = ko.observable();
self.lang = ko.observable(options.lang);
self.skipBlacklisted = ko.observable(options.skipBlacklisted);
self.url = ko.computed(function () {
return options.baseUrl + "?lang=" + self.lang() + "&skipbl=" + self.skipBlacklisted();
});
return self;
};
$(function () {
// Bulk CommCare translations
var $commcareForm = $("#bulk_ui_translation_upload_form");
if ($commcareForm.length) {
$commcareForm.koApplyBindings({
file: ko.observable(),
});
}
// Bulk application translations
var $translationsPanel = $("#bulk-application-translations");
if ($translationsPanel.length) {
$translationsPanel.koApplyBindings(appTranslationsModel({
baseUrl: $translationsPanel.find("#download_link").attr("href"),
lang: $translationsPanel.find("select").val() || '',
skipBlacklisted: $translationsPanel.find("#skip_blacklisted").val() || '',
}));
}
});
});
| Add value to JS viewmodel | Add value to JS viewmodel
| JavaScript | bsd-3-clause | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | javascript | ## Code Before:
hqDefine("app_manager/js/settings/translations", function () {
var appTranslationsModel = function (options) {
hqImport("hqwebapp/js/assert_properties").assertRequired(options, ['baseUrl', 'lang']);
var self = {};
self.file = ko.observable();
self.lang = ko.observable(options.lang);
self.url = ko.computed(function () {
return options.baseUrl + "?lang=" + self.lang();
});
return self;
};
$(function () {
// Bulk CommCare translations
var $commcareForm = $("#bulk_ui_translation_upload_form");
if ($commcareForm.length) {
$commcareForm.koApplyBindings({
file: ko.observable(),
});
}
// Bulk application translations
var $translationsPanel = $("#bulk-application-translations");
if ($translationsPanel.length) {
$translationsPanel.koApplyBindings(appTranslationsModel({
baseUrl: $translationsPanel.find("#download_link").attr("href"),
lang: $translationsPanel.find("select").val() || '',
}));
}
});
});
## Instruction:
Add value to JS viewmodel
## Code After:
hqDefine("app_manager/js/settings/translations", function () {
var appTranslationsModel = function (options) {
hqImport("hqwebapp/js/assert_properties").assertRequired(options, ['baseUrl', 'lang', 'skipBlacklisted']);
var self = {};
self.file = ko.observable();
self.lang = ko.observable(options.lang);
self.skipBlacklisted = ko.observable(options.skipBlacklisted);
self.url = ko.computed(function () {
return options.baseUrl + "?lang=" + self.lang() + "&skipbl=" + self.skipBlacklisted();
});
return self;
};
$(function () {
// Bulk CommCare translations
var $commcareForm = $("#bulk_ui_translation_upload_form");
if ($commcareForm.length) {
$commcareForm.koApplyBindings({
file: ko.observable(),
});
}
// Bulk application translations
var $translationsPanel = $("#bulk-application-translations");
if ($translationsPanel.length) {
$translationsPanel.koApplyBindings(appTranslationsModel({
baseUrl: $translationsPanel.find("#download_link").attr("href"),
lang: $translationsPanel.find("select").val() || '',
skipBlacklisted: $translationsPanel.find("#skip_blacklisted").val() || '',
}));
}
});
});
| hqDefine("app_manager/js/settings/translations", function () {
var appTranslationsModel = function (options) {
- hqImport("hqwebapp/js/assert_properties").assertRequired(options, ['baseUrl', 'lang']);
+ hqImport("hqwebapp/js/assert_properties").assertRequired(options, ['baseUrl', 'lang', 'skipBlacklisted']);
? +++++++++++++++++++
var self = {};
self.file = ko.observable();
self.lang = ko.observable(options.lang);
+ self.skipBlacklisted = ko.observable(options.skipBlacklisted);
self.url = ko.computed(function () {
- return options.baseUrl + "?lang=" + self.lang();
+ return options.baseUrl + "?lang=" + self.lang() + "&skipbl=" + self.skipBlacklisted();
? ++++++++++++++++++++++++++++++++++++++
});
return self;
};
$(function () {
// Bulk CommCare translations
var $commcareForm = $("#bulk_ui_translation_upload_form");
if ($commcareForm.length) {
$commcareForm.koApplyBindings({
file: ko.observable(),
});
}
// Bulk application translations
var $translationsPanel = $("#bulk-application-translations");
if ($translationsPanel.length) {
$translationsPanel.koApplyBindings(appTranslationsModel({
baseUrl: $translationsPanel.find("#download_link").attr("href"),
lang: $translationsPanel.find("select").val() || '',
+ skipBlacklisted: $translationsPanel.find("#skip_blacklisted").val() || '',
}));
}
});
}); | 6 | 0.181818 | 4 | 2 |
f00a56589f5d344ba14633b99ae1c9233a53fdb1 | .vscode/launch.json | .vscode/launch.json | {
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"type": "chrome",
"request": "launch",
"name": "Debug extension",
"url": "chrome://extensions/",
"webRoot": "${workspaceFolder}/main",
"runtimeArgs": ["--load-extension=${workspaceFolder}/main"]
},
{
"type": "chrome",
"request": "launch",
"name": "Debug tests",
"url": "",
"webRoot": "${workspaceFolder}/output",
"runtimeArgs": [
"--load-extension=${workspaceFolder}/output",
"--auto-open-devtools-for-tabs"
]
},
{
"type": "node",
"request": "launch",
"name": "Debug test launcher",
"program": "${workspaceFolder}/output/test/run-tests.js"
}
]
}
| {
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": "Launch Chrome",
"request": "launch",
"type": "pwa-chrome",
"url": "chrome-extension://eofmjdpbknpodfmdfpkcneiialiodobm/html/session-manager.html",
"webRoot": "${workspaceFolder}/src",
"runtimeArgs": ["--load-extension=${workspaceFolder}/src"],
// "urlFilter": "chrome-extension://*"
},
]
}
| Update debug configs to use new built-in extension | Update debug configs to use new built-in extension
| JSON | mit | moderatemisbehaviour/seshy,moderatemisbehaviour/seshy | json | ## Code Before:
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"type": "chrome",
"request": "launch",
"name": "Debug extension",
"url": "chrome://extensions/",
"webRoot": "${workspaceFolder}/main",
"runtimeArgs": ["--load-extension=${workspaceFolder}/main"]
},
{
"type": "chrome",
"request": "launch",
"name": "Debug tests",
"url": "",
"webRoot": "${workspaceFolder}/output",
"runtimeArgs": [
"--load-extension=${workspaceFolder}/output",
"--auto-open-devtools-for-tabs"
]
},
{
"type": "node",
"request": "launch",
"name": "Debug test launcher",
"program": "${workspaceFolder}/output/test/run-tests.js"
}
]
}
## Instruction:
Update debug configs to use new built-in extension
## Code After:
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": "Launch Chrome",
"request": "launch",
"type": "pwa-chrome",
"url": "chrome-extension://eofmjdpbknpodfmdfpkcneiialiodobm/html/session-manager.html",
"webRoot": "${workspaceFolder}/src",
"runtimeArgs": ["--load-extension=${workspaceFolder}/src"],
// "urlFilter": "chrome-extension://*"
},
]
}
| {
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
- "type": "chrome",
? ^^^
+ "name": "Launch Chrome",
? ^^^ ++++ +++
"request": "launch",
- "name": "Debug extension",
- "url": "chrome://extensions/",
+ "type": "pwa-chrome",
+ "url": "chrome-extension://eofmjdpbknpodfmdfpkcneiialiodobm/html/session-manager.html",
- "webRoot": "${workspaceFolder}/main",
? ^^^^
+ "webRoot": "${workspaceFolder}/src",
? ^^^
- "runtimeArgs": ["--load-extension=${workspaceFolder}/main"]
? ^^^^
+ "runtimeArgs": ["--load-extension=${workspaceFolder}/src"],
? ^^^ +
+ // "urlFilter": "chrome-extension://*"
},
- {
- "type": "chrome",
- "request": "launch",
- "name": "Debug tests",
- "url": "",
- "webRoot": "${workspaceFolder}/output",
- "runtimeArgs": [
- "--load-extension=${workspaceFolder}/output",
- "--auto-open-devtools-for-tabs"
- ]
- },
- {
- "type": "node",
- "request": "launch",
- "name": "Debug test launcher",
- "program": "${workspaceFolder}/output/test/run-tests.js"
- }
]
} | 28 | 0.848485 | 6 | 22 |
5321a5e48585ff026a28c1de67f8841dec3dc93b | scripts/docker-postgres.sh | scripts/docker-postgres.sh | export POSTGRES_PASSWORD="uleash"
echo "starting postgres in docker "
HASH=`docker run -P --name unleash-postgres -e POSTGRES_PASSWORD=$POSTGRES_PASSWORD -d postgres:9.3`
export PGPORT=`docker ps| grep unleash-post| awk '{print $(NF-1)}'| awk -F "->" '{print $1}'| awk -F \: '{print $2}'`
echo "PGPORT: $PGPORT"
echo ""
# ----------- Wait for postgres to start -----------
if [ -z "$DOCKER_HOST" ]
then
export database_host="127.0.0.1"
else
export database_host=$(echo $DOCKER_HOST |awk -F \/ '{print $NF}'| awk -F \: '{print $1}')
fi
for i in `seq 1 120`;
do
echo -n "."
sleep 1
netcat -z $database_host $PGPORT && echo "postgres is up and running in docker in $i seconds!" && break
done
export TEST_DATABASE_URL=postgres://postgres:$POSTGRES_PASSWORD@$database_host:$PGPORT/postgres
npm install
DATABASE_URL=$TEST_DATABASE_URL ./node_modules/.bin/db-migrate up
npm test
docker stop $HASH
docker rm $HASH
| export POSTGRES_PASSWORD="uleash"
echo "starting postgres in docker "
HASH=`docker run -P --name unleash-postgres -e POSTGRES_PASSWORD=$POSTGRES_PASSWORD -d postgres:9.3`
export PGPORT=`docker ps| grep unleash-post| awk '{print $(NF-1)}'| awk -F "->" '{print $1}'| awk -F \: '{print $2}'`
echo "PGPORT: $PGPORT"
echo ""
# ----------- Wait for postgres to start -----------
if [ -z "$DOCKER_HOST" ]
then
export database_host="127.0.0.1"
else
export database_host=$(echo $DOCKER_HOST |awk -F \/ '{print $NF}'| awk -F \: '{print $1}')
fi
for i in `seq 1 120`;
do
echo -n "."
sleep 1
netcat -z $database_host $PGPORT && echo "postgres is up and running in docker in $i seconds!" && break
done
export TEST_DATABASE_URL=postgres://postgres:$POSTGRES_PASSWORD@$database_host:$PGPORT/postgres
yarn
DATABASE_URL=$TEST_DATABASE_URL ./node_modules/.bin/db-migrate up
yarn test
docker stop $HASH
docker rm $HASH
| Use yarn in test script | Use yarn in test script
| Shell | apache-2.0 | Unleash/unleash,Unleash/unleash,finn-no/unleash,Unleash/unleash,finn-no/unleash,finn-no/unleash,Unleash/unleash | shell | ## Code Before:
export POSTGRES_PASSWORD="uleash"
echo "starting postgres in docker "
HASH=`docker run -P --name unleash-postgres -e POSTGRES_PASSWORD=$POSTGRES_PASSWORD -d postgres:9.3`
export PGPORT=`docker ps| grep unleash-post| awk '{print $(NF-1)}'| awk -F "->" '{print $1}'| awk -F \: '{print $2}'`
echo "PGPORT: $PGPORT"
echo ""
# ----------- Wait for postgres to start -----------
if [ -z "$DOCKER_HOST" ]
then
export database_host="127.0.0.1"
else
export database_host=$(echo $DOCKER_HOST |awk -F \/ '{print $NF}'| awk -F \: '{print $1}')
fi
for i in `seq 1 120`;
do
echo -n "."
sleep 1
netcat -z $database_host $PGPORT && echo "postgres is up and running in docker in $i seconds!" && break
done
export TEST_DATABASE_URL=postgres://postgres:$POSTGRES_PASSWORD@$database_host:$PGPORT/postgres
npm install
DATABASE_URL=$TEST_DATABASE_URL ./node_modules/.bin/db-migrate up
npm test
docker stop $HASH
docker rm $HASH
## Instruction:
Use yarn in test script
## Code After:
export POSTGRES_PASSWORD="uleash"
echo "starting postgres in docker "
HASH=`docker run -P --name unleash-postgres -e POSTGRES_PASSWORD=$POSTGRES_PASSWORD -d postgres:9.3`
export PGPORT=`docker ps| grep unleash-post| awk '{print $(NF-1)}'| awk -F "->" '{print $1}'| awk -F \: '{print $2}'`
echo "PGPORT: $PGPORT"
echo ""
# ----------- Wait for postgres to start -----------
if [ -z "$DOCKER_HOST" ]
then
export database_host="127.0.0.1"
else
export database_host=$(echo $DOCKER_HOST |awk -F \/ '{print $NF}'| awk -F \: '{print $1}')
fi
for i in `seq 1 120`;
do
echo -n "."
sleep 1
netcat -z $database_host $PGPORT && echo "postgres is up and running in docker in $i seconds!" && break
done
export TEST_DATABASE_URL=postgres://postgres:$POSTGRES_PASSWORD@$database_host:$PGPORT/postgres
yarn
DATABASE_URL=$TEST_DATABASE_URL ./node_modules/.bin/db-migrate up
yarn test
docker stop $HASH
docker rm $HASH
| export POSTGRES_PASSWORD="uleash"
echo "starting postgres in docker "
HASH=`docker run -P --name unleash-postgres -e POSTGRES_PASSWORD=$POSTGRES_PASSWORD -d postgres:9.3`
export PGPORT=`docker ps| grep unleash-post| awk '{print $(NF-1)}'| awk -F "->" '{print $1}'| awk -F \: '{print $2}'`
echo "PGPORT: $PGPORT"
echo ""
# ----------- Wait for postgres to start -----------
if [ -z "$DOCKER_HOST" ]
then
export database_host="127.0.0.1"
else
export database_host=$(echo $DOCKER_HOST |awk -F \/ '{print $NF}'| awk -F \: '{print $1}')
fi
for i in `seq 1 120`;
do
echo -n "."
sleep 1
netcat -z $database_host $PGPORT && echo "postgres is up and running in docker in $i seconds!" && break
done
export TEST_DATABASE_URL=postgres://postgres:$POSTGRES_PASSWORD@$database_host:$PGPORT/postgres
- npm install
+ yarn
DATABASE_URL=$TEST_DATABASE_URL ./node_modules/.bin/db-migrate up
- npm test
+ yarn test
docker stop $HASH
docker rm $HASH | 4 | 0.133333 | 2 | 2 |
41224eeba8b6fd424877bef4b5855cfe81343561 | lib/kuroko2/engine.rb | lib/kuroko2/engine.rb | module Kuroko2
class Engine < ::Rails::Engine
isolate_namespace Kuroko2
config.before_configuration do
require 'kaminari'
require 'chrono'
end
config.autoload_paths << root.join('lib')
config.before_initialize do
URI.parse(Kuroko2.config.url).tap do |url|
Kuroko2.config.url_host = url.host
Kuroko2.config.url_scheme = url.scheme
Kuroko2.config.url_port = url.port
end
end
initializer "kuroko2.configuration" do |app|
if Kuroko2.config.custom_tasks
Kuroko2.config.custom_tasks.each do |key, klass|
unless Workflow::Node::TASK_REGISTORY.has_key?(key)
Workflow::Node.register(
key: key.to_sym,
klass: Workflow::Task.const_get(klass, false)
)
end
end
end
config.action_mailer.default_url_options = {
host: Kuroko2.config.url_host,
protocol: Kuroko2.config.url_scheme,
port: Kuroko2.config.url_port
}
config.action_mailer.delivery_method = Kuroko2.config.action_mailer.delivery_method.to_sym
config.action_mailer.smtp_settings =
Kuroko2.config.action_mailer.smtp_settings.to_h.symbolize_keys || {}
end
end
end
| module Kuroko2
class Engine < ::Rails::Engine
isolate_namespace Kuroko2
config.before_configuration do
require 'kaminari'
require 'chrono'
end
config.autoload_paths << root.join('lib')
initializer "kuroko2.configuration" do |app|
URI.parse(Kuroko2.config.url).tap do |url|
Kuroko2.config.url_host = url.host
Kuroko2.config.url_scheme = url.scheme
Kuroko2.config.url_port = url.port
end
if Kuroko2.config.custom_tasks
Kuroko2.config.custom_tasks.each do |key, klass|
unless Workflow::Node::TASK_REGISTORY.has_key?(key)
Workflow::Node.register(
key: key.to_sym,
klass: Workflow::Task.const_get(klass, false)
)
end
end
end
config.action_mailer.default_url_options = {
host: Kuroko2.config.url_host,
protocol: Kuroko2.config.url_scheme,
port: Kuroko2.config.url_port
}
config.action_mailer.delivery_method = Kuroko2.config.action_mailer.delivery_method.to_sym
config.action_mailer.smtp_settings =
Kuroko2.config.action_mailer.smtp_settings.to_h.symbolize_keys || {}
end
end
end
| Move configuration loading to initializer | Move configuration loading to initializer
| Ruby | mit | cookpad/kuroko2,cookpad/kuroko2,cookpad/kuroko2 | ruby | ## Code Before:
module Kuroko2
class Engine < ::Rails::Engine
isolate_namespace Kuroko2
config.before_configuration do
require 'kaminari'
require 'chrono'
end
config.autoload_paths << root.join('lib')
config.before_initialize do
URI.parse(Kuroko2.config.url).tap do |url|
Kuroko2.config.url_host = url.host
Kuroko2.config.url_scheme = url.scheme
Kuroko2.config.url_port = url.port
end
end
initializer "kuroko2.configuration" do |app|
if Kuroko2.config.custom_tasks
Kuroko2.config.custom_tasks.each do |key, klass|
unless Workflow::Node::TASK_REGISTORY.has_key?(key)
Workflow::Node.register(
key: key.to_sym,
klass: Workflow::Task.const_get(klass, false)
)
end
end
end
config.action_mailer.default_url_options = {
host: Kuroko2.config.url_host,
protocol: Kuroko2.config.url_scheme,
port: Kuroko2.config.url_port
}
config.action_mailer.delivery_method = Kuroko2.config.action_mailer.delivery_method.to_sym
config.action_mailer.smtp_settings =
Kuroko2.config.action_mailer.smtp_settings.to_h.symbolize_keys || {}
end
end
end
## Instruction:
Move configuration loading to initializer
## Code After:
module Kuroko2
class Engine < ::Rails::Engine
isolate_namespace Kuroko2
config.before_configuration do
require 'kaminari'
require 'chrono'
end
config.autoload_paths << root.join('lib')
initializer "kuroko2.configuration" do |app|
URI.parse(Kuroko2.config.url).tap do |url|
Kuroko2.config.url_host = url.host
Kuroko2.config.url_scheme = url.scheme
Kuroko2.config.url_port = url.port
end
if Kuroko2.config.custom_tasks
Kuroko2.config.custom_tasks.each do |key, klass|
unless Workflow::Node::TASK_REGISTORY.has_key?(key)
Workflow::Node.register(
key: key.to_sym,
klass: Workflow::Task.const_get(klass, false)
)
end
end
end
config.action_mailer.default_url_options = {
host: Kuroko2.config.url_host,
protocol: Kuroko2.config.url_scheme,
port: Kuroko2.config.url_port
}
config.action_mailer.delivery_method = Kuroko2.config.action_mailer.delivery_method.to_sym
config.action_mailer.smtp_settings =
Kuroko2.config.action_mailer.smtp_settings.to_h.symbolize_keys || {}
end
end
end
| module Kuroko2
class Engine < ::Rails::Engine
isolate_namespace Kuroko2
config.before_configuration do
require 'kaminari'
require 'chrono'
end
config.autoload_paths << root.join('lib')
- config.before_initialize do
+ initializer "kuroko2.configuration" do |app|
URI.parse(Kuroko2.config.url).tap do |url|
Kuroko2.config.url_host = url.host
Kuroko2.config.url_scheme = url.scheme
Kuroko2.config.url_port = url.port
end
- end
- initializer "kuroko2.configuration" do |app|
if Kuroko2.config.custom_tasks
Kuroko2.config.custom_tasks.each do |key, klass|
unless Workflow::Node::TASK_REGISTORY.has_key?(key)
Workflow::Node.register(
key: key.to_sym,
klass: Workflow::Task.const_get(klass, false)
)
end
end
end
config.action_mailer.default_url_options = {
host: Kuroko2.config.url_host,
protocol: Kuroko2.config.url_scheme,
port: Kuroko2.config.url_port
}
config.action_mailer.delivery_method = Kuroko2.config.action_mailer.delivery_method.to_sym
config.action_mailer.smtp_settings =
Kuroko2.config.action_mailer.smtp_settings.to_h.symbolize_keys || {}
end
end
end | 4 | 0.093023 | 1 | 3 |
3b3050e11da73aed4b2d393141e2568fa133b740 | app/hero.service.ts | app/hero.service.ts | import { Injectable } from '@angular/core';
@Injectable()
export class HeroService {
}
| import { Injectable } from '@angular/core';
@Injectable()
export class HeroService {
getHeroes(): void {} // stub
}
| Add a getHeroes method stub. | Add a getHeroes method stub.
| TypeScript | mit | amalshehu/angular-tour-of-heroes,amalshehu/angular-tour-of-heroes,amalshehu/angular-tour-of-heroes | typescript | ## Code Before:
import { Injectable } from '@angular/core';
@Injectable()
export class HeroService {
}
## Instruction:
Add a getHeroes method stub.
## Code After:
import { Injectable } from '@angular/core';
@Injectable()
export class HeroService {
getHeroes(): void {} // stub
}
| import { Injectable } from '@angular/core';
@Injectable()
export class HeroService {
+ getHeroes(): void {} // stub
} | 1 | 0.2 | 1 | 0 |
bfdac78e668a68bc1bbfe26626d7a615cef67b18 | .travis.yml | .travis.yml | matrix:
include:
- rvm: 1.8.7
gemfile: Gemfile.1.8.7
- rvm: 1.9.2
gemfile: Gemfile
- rvm: 1.9.3
gemfile: Gemfile
- rvm: 2.0.0
gemfile: Gemfile
- rvm: jruby-18mode
gemfile: Gemfile.1.8.7
- rvm: jruby-19mode
gemfile: Gemfile
- rvm: rbx-18mode
gemfile: Gemfile.1.8.7
- rvm: rbx-19mode
gemfile: Gemfile
branches:
only:
- master
script: "bundle exec rake spec"
| matrix:
include:
- rvm: 1.8.7
gemfile: Gemfile.1.8.7
- rvm: 1.9.2
gemfile: Gemfile
- rvm: 1.9.3
gemfile: Gemfile
- rvm: 2.0.0
gemfile: Gemfile
- rvm: jruby-18mode
gemfile: Gemfile.1.8.7
- rvm: jruby-19mode
gemfile: Gemfile
- rvm: rbx-18mode
gemfile: Gemfile.1.8.7
- rvm: rbx-19mode
gemfile: Gemfile
allowed_failures:
- rvm: jruby-18mode
gemfile: Gemfile.1.8.7
- rvm: jruby-19mode
gemfile: Gemfile
branches:
only:
- master
script: "bundle exec rake spec"
| Disable testing on jRuby until the big changes land | Disable testing on jRuby until the big changes land
| YAML | mit | bhollis/maruku | yaml | ## Code Before:
matrix:
include:
- rvm: 1.8.7
gemfile: Gemfile.1.8.7
- rvm: 1.9.2
gemfile: Gemfile
- rvm: 1.9.3
gemfile: Gemfile
- rvm: 2.0.0
gemfile: Gemfile
- rvm: jruby-18mode
gemfile: Gemfile.1.8.7
- rvm: jruby-19mode
gemfile: Gemfile
- rvm: rbx-18mode
gemfile: Gemfile.1.8.7
- rvm: rbx-19mode
gemfile: Gemfile
branches:
only:
- master
script: "bundle exec rake spec"
## Instruction:
Disable testing on jRuby until the big changes land
## Code After:
matrix:
include:
- rvm: 1.8.7
gemfile: Gemfile.1.8.7
- rvm: 1.9.2
gemfile: Gemfile
- rvm: 1.9.3
gemfile: Gemfile
- rvm: 2.0.0
gemfile: Gemfile
- rvm: jruby-18mode
gemfile: Gemfile.1.8.7
- rvm: jruby-19mode
gemfile: Gemfile
- rvm: rbx-18mode
gemfile: Gemfile.1.8.7
- rvm: rbx-19mode
gemfile: Gemfile
allowed_failures:
- rvm: jruby-18mode
gemfile: Gemfile.1.8.7
- rvm: jruby-19mode
gemfile: Gemfile
branches:
only:
- master
script: "bundle exec rake spec"
| matrix:
include:
- rvm: 1.8.7
gemfile: Gemfile.1.8.7
- rvm: 1.9.2
gemfile: Gemfile
- rvm: 1.9.3
gemfile: Gemfile
- rvm: 2.0.0
gemfile: Gemfile
- rvm: jruby-18mode
gemfile: Gemfile.1.8.7
- rvm: jruby-19mode
gemfile: Gemfile
- rvm: rbx-18mode
gemfile: Gemfile.1.8.7
- rvm: rbx-19mode
gemfile: Gemfile
+ allowed_failures:
+ - rvm: jruby-18mode
+ gemfile: Gemfile.1.8.7
+ - rvm: jruby-19mode
+ gemfile: Gemfile
+
branches:
only:
- master
script: "bundle exec rake spec"
| 6 | 0.24 | 6 | 0 |
e0fdebc981fd581105d85493b4d81ddbf3af974b | src/routes.jsx | src/routes.jsx | import React from 'react';
import { Router, Route, IndexRoute, browserHistory } from 'react-router';
import Main from 'Main';
import Forecast from 'Forecast';
import Windspeed from 'Windspeed';
import Humidity from 'Humidity';
import Averages from 'Averages';
export const routes = (
<Router history={browserHistory}>
<Route path="/" component={Main}>
<IndexRoute component={Forecast} />
<Route path="forecast" component={Forecast} />
<Route path="windspeed" component={Windspeed} />
<Route path="humidity" component={Humidity} />
<Route path="averages" component={Averages} />
</Route>
</Router>
)
| import React from 'react';
import { Router, Route, IndexRedirect, browserHistory } from 'react-router';
import Main from 'Main';
import Forecast from 'Forecast';
import Windspeed from 'Windspeed';
import Humidity from 'Humidity';
import Averages from 'Averages';
export const routes = (
<Router history={browserHistory}>
<Route path="/" component={Main}>
<IndexRedirect to="/forecast" />
<Route path="forecast" component={Forecast} />
<Route path="windspeed" component={Windspeed} />
<Route path="humidity" component={Humidity} />
<Route path="averages" component={Averages} />
</Route>
</Router>
)
| Switch index route to an index redirect | Switch index route to an index redirect
| JSX | mit | JavierPDev/Weather-D3,JavierPDev/Weather-D3 | jsx | ## Code Before:
import React from 'react';
import { Router, Route, IndexRoute, browserHistory } from 'react-router';
import Main from 'Main';
import Forecast from 'Forecast';
import Windspeed from 'Windspeed';
import Humidity from 'Humidity';
import Averages from 'Averages';
export const routes = (
<Router history={browserHistory}>
<Route path="/" component={Main}>
<IndexRoute component={Forecast} />
<Route path="forecast" component={Forecast} />
<Route path="windspeed" component={Windspeed} />
<Route path="humidity" component={Humidity} />
<Route path="averages" component={Averages} />
</Route>
</Router>
)
## Instruction:
Switch index route to an index redirect
## Code After:
import React from 'react';
import { Router, Route, IndexRedirect, browserHistory } from 'react-router';
import Main from 'Main';
import Forecast from 'Forecast';
import Windspeed from 'Windspeed';
import Humidity from 'Humidity';
import Averages from 'Averages';
export const routes = (
<Router history={browserHistory}>
<Route path="/" component={Main}>
<IndexRedirect to="/forecast" />
<Route path="forecast" component={Forecast} />
<Route path="windspeed" component={Windspeed} />
<Route path="humidity" component={Humidity} />
<Route path="averages" component={Averages} />
</Route>
</Router>
)
| import React from 'react';
- import { Router, Route, IndexRoute, browserHistory } from 'react-router';
? ^^ -
+ import { Router, Route, IndexRedirect, browserHistory } from 'react-router';
? ^^^^^^
import Main from 'Main';
import Forecast from 'Forecast';
import Windspeed from 'Windspeed';
import Humidity from 'Humidity';
import Averages from 'Averages';
export const routes = (
<Router history={browserHistory}>
<Route path="/" component={Main}>
- <IndexRoute component={Forecast} />
+ <IndexRedirect to="/forecast" />
<Route path="forecast" component={Forecast} />
<Route path="windspeed" component={Windspeed} />
<Route path="humidity" component={Humidity} />
<Route path="averages" component={Averages} />
</Route>
</Router>
) | 4 | 0.2 | 2 | 2 |
de4aa20a66c991429ec7bb416d899e5747744bad | src/app/hero-service.stub.ts | src/app/hero-service.stub.ts | import createSpy = jasmine.createSpy
import { HEROES_DATA } from './mockup-data'
const HEROES_SERVICE_DATA = HEROES_DATA.slice()
export class HeroServiceStub {
getHero = createSpy('getHero').and.callFake(() =>
Promise.resolve(Object.assign({}, HEROES_SERVICE_DATA[0]))
)
getHeroes = createSpy('getHeroes').and.callFake(() =>
Promise.resolve(HEROES_SERVICE_DATA.slice())
)
create = createSpy('create').and.callFake((name: string) => {
HEROES_SERVICE_DATA.push({ id: 100, name })
return Promise.resolve(HEROES_SERVICE_DATA)
})
}
| import createSpy = jasmine.createSpy
import { Hero } from './hero'
import { HEROES_DATA } from './mockup-data'
export class HeroServiceStub {
private heroes: Hero[]
getHero = createSpy('getHero').and.callFake(() =>
Promise.resolve(Object.assign({}, this.heroes[0]))
)
getHeroes = createSpy('getHeroes').and.callFake(() =>
Promise.resolve(this.heroes.slice())
)
create = createSpy('create').and.callFake((name: string) => {
this.heroes.push({ id: 100, name })
return Promise.resolve(this.heroes)
})
remove = createSpy('remove').and.callFake((hero: Hero) => {
this.heroes = this.heroes.filter(h => h.id !== hero.id)
return Promise.resolve(this.heroes)
})
constructor() {
this.heroes = [...HEROES_DATA]
}
}
| Add remove method and localize heroes data | Add remove method and localize heroes data
| TypeScript | mit | hckhanh/tour-of-heroes,hckhanh/tour-of-heroes,hckhanh/tour-of-heroes | typescript | ## Code Before:
import createSpy = jasmine.createSpy
import { HEROES_DATA } from './mockup-data'
const HEROES_SERVICE_DATA = HEROES_DATA.slice()
export class HeroServiceStub {
getHero = createSpy('getHero').and.callFake(() =>
Promise.resolve(Object.assign({}, HEROES_SERVICE_DATA[0]))
)
getHeroes = createSpy('getHeroes').and.callFake(() =>
Promise.resolve(HEROES_SERVICE_DATA.slice())
)
create = createSpy('create').and.callFake((name: string) => {
HEROES_SERVICE_DATA.push({ id: 100, name })
return Promise.resolve(HEROES_SERVICE_DATA)
})
}
## Instruction:
Add remove method and localize heroes data
## Code After:
import createSpy = jasmine.createSpy
import { Hero } from './hero'
import { HEROES_DATA } from './mockup-data'
export class HeroServiceStub {
private heroes: Hero[]
getHero = createSpy('getHero').and.callFake(() =>
Promise.resolve(Object.assign({}, this.heroes[0]))
)
getHeroes = createSpy('getHeroes').and.callFake(() =>
Promise.resolve(this.heroes.slice())
)
create = createSpy('create').and.callFake((name: string) => {
this.heroes.push({ id: 100, name })
return Promise.resolve(this.heroes)
})
remove = createSpy('remove').and.callFake((hero: Hero) => {
this.heroes = this.heroes.filter(h => h.id !== hero.id)
return Promise.resolve(this.heroes)
})
constructor() {
this.heroes = [...HEROES_DATA]
}
}
| import createSpy = jasmine.createSpy
+ import { Hero } from './hero'
import { HEROES_DATA } from './mockup-data'
- const HEROES_SERVICE_DATA = HEROES_DATA.slice()
+ export class HeroServiceStub {
+ private heroes: Hero[]
- export class HeroServiceStub {
getHero = createSpy('getHero').and.callFake(() =>
- Promise.resolve(Object.assign({}, HEROES_SERVICE_DATA[0]))
+ Promise.resolve(Object.assign({}, this.heroes[0]))
)
getHeroes = createSpy('getHeroes').and.callFake(() =>
- Promise.resolve(HEROES_SERVICE_DATA.slice())
+ Promise.resolve(this.heroes.slice())
)
create = createSpy('create').and.callFake((name: string) => {
- HEROES_SERVICE_DATA.push({ id: 100, name })
- return Promise.resolve(HEROES_SERVICE_DATA)
+ this.heroes.push({ id: 100, name })
+ return Promise.resolve(this.heroes)
})
+
+ remove = createSpy('remove').and.callFake((hero: Hero) => {
+ this.heroes = this.heroes.filter(h => h.id !== hero.id)
+ return Promise.resolve(this.heroes)
+ })
+
+ constructor() {
+ this.heroes = [...HEROES_DATA]
+ }
} | 22 | 1.157895 | 16 | 6 |
730f48273c4479032727c62f4c17dd8fe42ad415 | README.md | README.md | libipc - An IPC mechanism.
Building on Linux
-----------------
1. Install libkqueue
2. Run:
```
make CFLAGS="-I/usr/include/kqueue -D_BSD_SOURCE" LDADD="-lkqueue -ldl"
```
|
libipc is an IPC mechanism for C programs. It is under heavy development
and the documentation has not been written yet.
It currently runs on FreeBSD and Linux, and I will accept patches to
run on other POSIX-like systems.
Building on FreeBSD
-----------------
Run "make" to build everything.
Building on Linux
-----------------
1. Install libkqueue
2. Run:
```
make CFLAGS="-I/usr/include/kqueue -D_BSD_SOURCE" LDADD="-lkqueue -ldl"
```
| Add a little more documentation | Add a little more documentation
| Markdown | bsd-2-clause | mheily/libipc,mheily/libipc,mheily/zipzapzop,mheily/zipzapzop,mheily/libipc,mheily/zipzapzop | markdown | ## Code Before:
libipc - An IPC mechanism.
Building on Linux
-----------------
1. Install libkqueue
2. Run:
```
make CFLAGS="-I/usr/include/kqueue -D_BSD_SOURCE" LDADD="-lkqueue -ldl"
```
## Instruction:
Add a little more documentation
## Code After:
libipc is an IPC mechanism for C programs. It is under heavy development
and the documentation has not been written yet.
It currently runs on FreeBSD and Linux, and I will accept patches to
run on other POSIX-like systems.
Building on FreeBSD
-----------------
Run "make" to build everything.
Building on Linux
-----------------
1. Install libkqueue
2. Run:
```
make CFLAGS="-I/usr/include/kqueue -D_BSD_SOURCE" LDADD="-lkqueue -ldl"
```
| - libipc - An IPC mechanism.
+
+ libipc is an IPC mechanism for C programs. It is under heavy development
+ and the documentation has not been written yet.
+
+ It currently runs on FreeBSD and Linux, and I will accept patches to
+ run on other POSIX-like systems.
+
+ Building on FreeBSD
+ -----------------
+
+ Run "make" to build everything.
Building on Linux
-----------------
1. Install libkqueue
2. Run:
```
make CFLAGS="-I/usr/include/kqueue -D_BSD_SOURCE" LDADD="-lkqueue -ldl"
``` | 12 | 1.2 | 11 | 1 |
dcaa465fe33deba6b99a958de41b512f0c70b95e | rds-api-core/pom.xml | rds-api-core/pom.xml | <?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>edu.rpi.tw.rds</groupId>
<artifactId>rds-api-core</artifactId>
<version>1.0-SNAPSHOT</version>
<packaging>jar</packaging>
<name>RDS-API Core Module</name>
</project> | <?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>edu.rpi.tw.rds</groupId>
<artifactId>rds-api-all</artifactId>
<version>1.0-SNAPSHOT</version>
</parent>
<artifactId>rds-api-core</artifactId>
<packaging>jar</packaging>
<name>RDS-API Core Module</name>
<properties>
<spring.version>4.0.4.RELEASE</spring.version>
<spring.data.mongodb.version>1.6.0.RELEASE</spring.data.mongodb.version>
</properties>
<dependencies>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-core</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-beans</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-context</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb</artifactId>
<version>${spring.data.mongodb.version}</version>
</dependency>
<!-- JSR-303 Validation -->
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-validator</artifactId>
<version>4.2.0.Final</version>
</dependency>
<!-- Apache Commons -->
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
<version>3.3.2</version>
</dependency>
</dependencies>
</project> | Add reference to parent package, dependency for validator | Add reference to parent package, dependency for validator
| XML | mit | tetherless-world/rds-api | xml | ## Code Before:
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>edu.rpi.tw.rds</groupId>
<artifactId>rds-api-core</artifactId>
<version>1.0-SNAPSHOT</version>
<packaging>jar</packaging>
<name>RDS-API Core Module</name>
</project>
## Instruction:
Add reference to parent package, dependency for validator
## Code After:
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>edu.rpi.tw.rds</groupId>
<artifactId>rds-api-all</artifactId>
<version>1.0-SNAPSHOT</version>
</parent>
<artifactId>rds-api-core</artifactId>
<packaging>jar</packaging>
<name>RDS-API Core Module</name>
<properties>
<spring.version>4.0.4.RELEASE</spring.version>
<spring.data.mongodb.version>1.6.0.RELEASE</spring.data.mongodb.version>
</properties>
<dependencies>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-core</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-beans</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-context</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb</artifactId>
<version>${spring.data.mongodb.version}</version>
</dependency>
<!-- JSR-303 Validation -->
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-validator</artifactId>
<version>4.2.0.Final</version>
</dependency>
<!-- Apache Commons -->
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
<version>3.3.2</version>
</dependency>
</dependencies>
</project> | <?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
+ <parent>
- <groupId>edu.rpi.tw.rds</groupId>
+ <groupId>edu.rpi.tw.rds</groupId>
? ++++
+ <artifactId>rds-api-all</artifactId>
+ <version>1.0-SNAPSHOT</version>
+ </parent>
+
<artifactId>rds-api-core</artifactId>
- <version>1.0-SNAPSHOT</version>
<packaging>jar</packaging>
<name>RDS-API Core Module</name>
+ <properties>
+ <spring.version>4.0.4.RELEASE</spring.version>
+ <spring.data.mongodb.version>1.6.0.RELEASE</spring.data.mongodb.version>
+ </properties>
+
+ <dependencies>
+
+ <dependency>
+ <groupId>org.springframework</groupId>
+ <artifactId>spring-core</artifactId>
+ <version>${spring.version}</version>
+ </dependency>
+
+ <dependency>
+ <groupId>org.springframework</groupId>
+ <artifactId>spring-beans</artifactId>
+ <version>${spring.version}</version>
+ </dependency>
+
+ <dependency>
+ <groupId>org.springframework</groupId>
+ <artifactId>spring-context</artifactId>
+ <version>${spring.version}</version>
+ </dependency>
+
+ <dependency>
+ <groupId>org.springframework.data</groupId>
+ <artifactId>spring-data-mongodb</artifactId>
+ <version>${spring.data.mongodb.version}</version>
+ </dependency>
+
+ <!-- JSR-303 Validation -->
+
+ <dependency>
+ <groupId>org.hibernate</groupId>
+ <artifactId>hibernate-validator</artifactId>
+ <version>4.2.0.Final</version>
+ </dependency>
+
+ <!-- Apache Commons -->
+
+ <dependency>
+ <groupId>org.apache.commons</groupId>
+ <artifactId>commons-lang3</artifactId>
+ <version>3.3.2</version>
+ </dependency>
+
+ </dependencies>
+
</project> | 57 | 3.8 | 55 | 2 |
f441539ebc1ed2f76d28229a9025803aea4b386a | README.md | README.md | Git hook to check conformance to http://chris.beams.io/posts/git-commit/
## The seven rules of a great git commit message
1. Separate subject from body with a blank line
2. Limit the subject line to 50 characters
3. Capitalize the subject line
4. Do not end the subject line with a period
5. Use the imperative mood in the subject line
6. Wrap the body at 72 characters
7. Use the body to explain _what_ and _why_ vs. _how_
## Compatibility
Please contribute by testing on your platform if it's not listed below!
### Confirmed :thumbsup:
- Mac OS X, Travis CI
- Debian, Travis CI
### Unsupported :no_entry_sign:
- Windows
### Unknown :grey_question:
- Other *nixes, *BSDs, etc.
## Contribution
If you don't see your platform in the compatibility list above, please run the tests and let us know how you get on.
### Design Decisions
- The tests were originally written using `roundup`; however, this dependency was dropped to make it easier for users to run the tests on their machines
| Git hook to check conformance to http://chris.beams.io/posts/git-commit/
## The seven rules of a great git commit message
1. Separate subject from body with a blank line
2. Limit the subject line to 50 characters
3. Capitalize the subject line
4. Do not end the subject line with a period
5. Use the imperative mood in the subject line
6. Wrap the body at 72 characters
7. Use the body to explain _what_ and _why_ vs. _how_
## Installation
### Composer/Packagist
`composer [global] require `[`aaronjameslang/git-hook-commit-message-cbeams`](https://packagist.org/packages/aaronjameslang/git-hook-commit-message-cbeams)
## Compatibility
Please contribute by testing on your platform if it's not listed below!
### Confirmed :thumbsup:
- Mac OSX, travis CI
- Debian, travis CI
### Unsupported :no_entry_sign:
- Windows
### Unknown :grey_question:
- Other *nixes, *BSDs, etc.
## Contribution
If you don't see your platform in the compatibility list above, please run the tests and let us know how you get on.
### Design Decisions
- The tests were originally written using `roundup`, however this dependency was dropped to make it easier for users to run the tests on their machines
| Add instructions for installing via composer | Add instructions for installing via composer | Markdown | mit | aaronjameslang/git-hook-commit-message-cbeams | markdown | ## Code Before:
Git hook to check conformance to http://chris.beams.io/posts/git-commit/
## The seven rules of a great git commit message
1. Separate subject from body with a blank line
2. Limit the subject line to 50 characters
3. Capitalize the subject line
4. Do not end the subject line with a period
5. Use the imperative mood in the subject line
6. Wrap the body at 72 characters
7. Use the body to explain _what_ and _why_ vs. _how_
## Compatibility
Please contribute by testing on your platform if it's not listed below!
### Confirmed :thumbsup:
- Mac OSX, travis CI
- Debian, travis CI
### Unsupported :no_entry_sign:
- Windows
### Unknown :grey_question:
- Other *nixes, *BSDs, etc.
## Contribution
If you don't see your platform in the compatibility list above, please run the tests and let us know how you get on.
### Design Decisions
- The tests were originally written using `roundup`, however this dependency was dropped to make it easier for users to run the tests on their machines
## Instruction:
Add instructions for installing via composer
## Code After:
Git hook to check conformance to http://chris.beams.io/posts/git-commit/
## The seven rules of a great git commit message
1. Separate subject from body with a blank line
2. Limit the subject line to 50 characters
3. Capitalize the subject line
4. Do not end the subject line with a period
5. Use the imperative mood in the subject line
6. Wrap the body at 72 characters
7. Use the body to explain _what_ and _why_ vs. _how_
## Installation
### Composer/Packagist
`composer [global] require `[`aaronjameslang/git-hook-commit-message-cbeams`](https://packagist.org/packages/aaronjameslang/git-hook-commit-message-cbeams)
## Compatibility
Please contribute by testing on your platform if it's not listed below!
### Confirmed :thumbsup:
- Mac OSX, travis CI
- Debian, travis CI
### Unsupported :no_entry_sign:
- Windows
### Unknown :grey_question:
- Other *nixes, *BSDs, etc.
## Contribution
If you don't see your platform in the compatibility list above, please run the tests and let us know how you get on.
### Design Decisions
- The tests were originally written using `roundup`, however this dependency was dropped to make it easier for users to run the tests on their machines
| Git hook to check conformance to http://chris.beams.io/posts/git-commit/
## The seven rules of a great git commit message
1. Separate subject from body with a blank line
2. Limit the subject line to 50 characters
3. Capitalize the subject line
4. Do not end the subject line with a period
5. Use the imperative mood in the subject line
6. Wrap the body at 72 characters
7. Use the body to explain _what_ and _why_ vs. _how_
+
+ ## Installation
+
+ ### Composer/Packagist
+
+ `composer [global] require `[`aaronjameslang/git-hook-commit-message-cbeams`](https://packagist.org/packages/aaronjameslang/git-hook-commit-message-cbeams)
## Compatibility
Please contribute by testing on your platform if it's not listed below!
### Confirmed :thumbsup:
- Mac OSX, travis CI
- Debian, travis CI
### Unsupported :no_entry_sign:
- Windows
### Unknown :grey_question:
- Other *nixes, *BSDs, etc.
## Contribution
If you don't see your platform in the compatibility list above, please run the tests and let us know how you get on.
### Design Decisions
- The tests were originally written using `roundup`, however this dependency was dropped to make it easier for users to run the tests on their machines | 6 | 0.166667 | 6 | 0 |
527f4252e672d07cdf328e79ab2810abd49a17c0 | salt/nginx/map.jinja | salt/nginx/map.jinja | {% set nginx = salt['grains.filter_by']({
'default': {
'keepalive_timeout': 120,
'install_from_source': True,
'log_formats': {
'main': '$remote_addr - $remote_user [$time_local] $http_host "$request" $status $body_bytes_sent $request_time $upstream_response_time "$http_referer" "$http_user_agent" "$http_x_forwarded_for"',
},
'log_files': {
'access.log': 'main'
},
'extra_http': [],
'package': 'nginx',
'repo': 'deb https://nginx.org/packages/mainline/debian/ jessie nginx',
'repo_key_url': 'https://nginx.org/keys/nginx_signing.key',
'dh_keysize': 4096,
'allow_plaintext': True,
'add_default_https': True,
'add_default_http': True,
},
}, merge=salt['pillar.get']('nginx')) %}
| {% set nginx = salt['grains.filter_by']({
'base': {
'keepalive_timeout': 120,
'install_from_source': True,
'log_formats': {
'main': '$remote_addr - $remote_user [$time_local] $http_host "$request" $status $body_bytes_sent $request_time $upstream_response_time "$http_referer" "$http_user_agent" "$http_x_forwarded_for"',
},
'log_files': {
'access.log': 'main'
},
'extra_http': [],
'package': 'nginx',
'repo': 'deb https://nginx.org/packages/mainline/debian/ jessie nginx',
'repo_key_url': 'https://nginx.org/keys/nginx_signing.key',
'dh_keysize': 4096,
'allow_plaintext': True,
'add_default_https': True,
'add_default_http': True,
},
'stretch': {
'repo': 'deb https://nginx.org/packages/mainline/debian/ stretch nginx',
}
}, grain='oscodename', merge=salt['pillar.get']('nginx'), base='base') %}
| Add support for debian stretch to nginx state | Add support for debian stretch to nginx state
| HTML+Django | mit | thusoy/salt-states,thusoy/salt-states,thusoy/salt-states,thusoy/salt-states | html+django | ## Code Before:
{% set nginx = salt['grains.filter_by']({
'default': {
'keepalive_timeout': 120,
'install_from_source': True,
'log_formats': {
'main': '$remote_addr - $remote_user [$time_local] $http_host "$request" $status $body_bytes_sent $request_time $upstream_response_time "$http_referer" "$http_user_agent" "$http_x_forwarded_for"',
},
'log_files': {
'access.log': 'main'
},
'extra_http': [],
'package': 'nginx',
'repo': 'deb https://nginx.org/packages/mainline/debian/ jessie nginx',
'repo_key_url': 'https://nginx.org/keys/nginx_signing.key',
'dh_keysize': 4096,
'allow_plaintext': True,
'add_default_https': True,
'add_default_http': True,
},
}, merge=salt['pillar.get']('nginx')) %}
## Instruction:
Add support for debian stretch to nginx state
## Code After:
{% set nginx = salt['grains.filter_by']({
'base': {
'keepalive_timeout': 120,
'install_from_source': True,
'log_formats': {
'main': '$remote_addr - $remote_user [$time_local] $http_host "$request" $status $body_bytes_sent $request_time $upstream_response_time "$http_referer" "$http_user_agent" "$http_x_forwarded_for"',
},
'log_files': {
'access.log': 'main'
},
'extra_http': [],
'package': 'nginx',
'repo': 'deb https://nginx.org/packages/mainline/debian/ jessie nginx',
'repo_key_url': 'https://nginx.org/keys/nginx_signing.key',
'dh_keysize': 4096,
'allow_plaintext': True,
'add_default_https': True,
'add_default_http': True,
},
'stretch': {
'repo': 'deb https://nginx.org/packages/mainline/debian/ stretch nginx',
}
}, grain='oscodename', merge=salt['pillar.get']('nginx'), base='base') %}
| {% set nginx = salt['grains.filter_by']({
- 'default': {
+ 'base': {
'keepalive_timeout': 120,
'install_from_source': True,
'log_formats': {
'main': '$remote_addr - $remote_user [$time_local] $http_host "$request" $status $body_bytes_sent $request_time $upstream_response_time "$http_referer" "$http_user_agent" "$http_x_forwarded_for"',
},
'log_files': {
'access.log': 'main'
},
'extra_http': [],
'package': 'nginx',
'repo': 'deb https://nginx.org/packages/mainline/debian/ jessie nginx',
'repo_key_url': 'https://nginx.org/keys/nginx_signing.key',
'dh_keysize': 4096,
'allow_plaintext': True,
'add_default_https': True,
'add_default_http': True,
},
- }, merge=salt['pillar.get']('nginx')) %}
+ 'stretch': {
+ 'repo': 'deb https://nginx.org/packages/mainline/debian/ stretch nginx',
+ }
+ }, grain='oscodename', merge=salt['pillar.get']('nginx'), base='base') %} | 7 | 0.35 | 5 | 2 |
f1bcde1816dbb9728ced69ed08a613861f22f8c4 | example/config.yml | example/config.yml | <%- if progress_topic = ENV['BLOCKS_BATCH_PROGRESS_TOPIC'] -%>
progress_notification:
topic: <%= progress_topic %>
<%- end -%>
loggers:
- type: stdout
<%- if log_name = ENV['BLOCKS_BATCH_CLOUD_LOGGING_LOG_NAME'] -%>
- type: cloud_logging
log_name: <%= log_name %>
<%- end -%>
sustainer:
# delay:
# Delay message deadline for log jobs.
# It must be less than or equal to ACK_DEADLINE of subscription
# specified by BLOCKS_BATCH_PUBSUB_SUBSCRIPTION
# You can check ACK_DEADLINE by `gcloud beta pubsub subscriptions list`
# ACK_DEADLINE is set by `--ack-deadline` option for `gcloud beta pubsub subscriptions create`.
delay: 600
# interval
# The interval to send delay message. It must be lower than delay
# Default: 90% of delay
interval: 540
| <%- if progress_topic = ENV['BLOCKS_BATCH_PROGRESS_TOPIC'] -%>
progress_notification:
topic: <%= progress_topic %>
<%- end -%>
loggers:
- type: stdout
<%- if log_name = ENV['BLOCKS_BATCH_CLOUD_LOGGING_LOG_NAME'] -%>
- type: cloud_logging
log_name: <%= log_name %>
<%- end -%>
sustainer:
# delay:
# Delay message deadline for log jobs.
# It must be less than or equal to ACK_DEADLINE of subscription
# specified by BLOCKS_BATCH_PUBSUB_SUBSCRIPTION
# You can check ACK_DEADLINE by `gcloud beta pubsub subscriptions list`
# ACK_DEADLINE is set by `--ack-deadline` option for `gcloud beta pubsub subscriptions create`.
delay: <%= ENV['BLOCKS_BATCH_SUSTAINER_DELAY'] || 600 %>
# interval
# The interval to send delay message. It must be lower than delay
# Default: 90% of delay
interval: <%= ENV['BLOCKS_BATCH_SUSTAINER_INTERVAL'] || 540 %>
| Use environment variables for sutainer delay and interval | :+1: Use environment variables for sutainer delay and interval
| YAML | mit | groovenauts/magellan-gcs-proxy,groovenauts/magellan-gcs-proxy,groovenauts/magellan-gcs-proxy | yaml | ## Code Before:
<%- if progress_topic = ENV['BLOCKS_BATCH_PROGRESS_TOPIC'] -%>
progress_notification:
topic: <%= progress_topic %>
<%- end -%>
loggers:
- type: stdout
<%- if log_name = ENV['BLOCKS_BATCH_CLOUD_LOGGING_LOG_NAME'] -%>
- type: cloud_logging
log_name: <%= log_name %>
<%- end -%>
sustainer:
# delay:
# Delay message deadline for log jobs.
# It must be less than or equal to ACK_DEADLINE of subscription
# specified by BLOCKS_BATCH_PUBSUB_SUBSCRIPTION
# You can check ACK_DEADLINE by `gcloud beta pubsub subscriptions list`
# ACK_DEADLINE is set by `--ack-deadline` option for `gcloud beta pubsub subscriptions create`.
delay: 600
# interval
# The interval to send delay message. It must be lower than delay
# Default: 90% of delay
interval: 540
## Instruction:
:+1: Use environment variables for sutainer delay and interval
## Code After:
<%- if progress_topic = ENV['BLOCKS_BATCH_PROGRESS_TOPIC'] -%>
progress_notification:
topic: <%= progress_topic %>
<%- end -%>
loggers:
- type: stdout
<%- if log_name = ENV['BLOCKS_BATCH_CLOUD_LOGGING_LOG_NAME'] -%>
- type: cloud_logging
log_name: <%= log_name %>
<%- end -%>
sustainer:
# delay:
# Delay message deadline for log jobs.
# It must be less than or equal to ACK_DEADLINE of subscription
# specified by BLOCKS_BATCH_PUBSUB_SUBSCRIPTION
# You can check ACK_DEADLINE by `gcloud beta pubsub subscriptions list`
# ACK_DEADLINE is set by `--ack-deadline` option for `gcloud beta pubsub subscriptions create`.
delay: <%= ENV['BLOCKS_BATCH_SUSTAINER_DELAY'] || 600 %>
# interval
# The interval to send delay message. It must be lower than delay
# Default: 90% of delay
interval: <%= ENV['BLOCKS_BATCH_SUSTAINER_INTERVAL'] || 540 %>
| <%- if progress_topic = ENV['BLOCKS_BATCH_PROGRESS_TOPIC'] -%>
progress_notification:
topic: <%= progress_topic %>
<%- end -%>
loggers:
- type: stdout
<%- if log_name = ENV['BLOCKS_BATCH_CLOUD_LOGGING_LOG_NAME'] -%>
- type: cloud_logging
log_name: <%= log_name %>
<%- end -%>
sustainer:
# delay:
# Delay message deadline for log jobs.
# It must be less than or equal to ACK_DEADLINE of subscription
# specified by BLOCKS_BATCH_PUBSUB_SUBSCRIPTION
# You can check ACK_DEADLINE by `gcloud beta pubsub subscriptions list`
# ACK_DEADLINE is set by `--ack-deadline` option for `gcloud beta pubsub subscriptions create`.
- delay: 600
+ delay: <%= ENV['BLOCKS_BATCH_SUSTAINER_DELAY'] || 600 %>
# interval
# The interval to send delay message. It must be lower than delay
# Default: 90% of delay
- interval: 540
+ interval: <%= ENV['BLOCKS_BATCH_SUSTAINER_INTERVAL'] || 540 %> | 4 | 0.16 | 2 | 2 |
712d24672d0e8a3d5626008e6a937c39e31ebf43 | entry_types/scrolled/package/src/entryState/watchCollections.js | entry_types/scrolled/package/src/entryState/watchCollections.js | import {watchCollection} from '../collections';
export function watchCollections({chapters, sections, contentElements, files}, {dispatch}) {
watchCollection(chapters, {
name: 'chapters',
attributes: ['id', 'permaId'],
includeConfiguration: true,
dispatch
});
watchCollection(sections, {
name: 'sections',
attributes: ['id', 'permaId', 'chapterId'],
includeConfiguration: true,
dispatch
});
watchCollection(contentElements, {
name: 'contentElements',
attributes: ['id', 'permaId', 'typeName', 'sectionId'],
keyAttribute: 'permaId',
includeConfiguration: true,
dispatch
});
Object.keys(files).forEach(collectionName => {
watchCollection(files[collectionName], {
name: camelize(collectionName),
attributes: ['id', {permaId: 'perma_id'}, 'width', 'height', 'basename'],
keyAttribute: 'permaId',
includeConfiguration: true,
dispatch
});
});
}
function camelize(snakeCase) {
return snakeCase.replace(/_[a-z]/g, function(match) {
return match[1].toUpperCase();
});
}
| import {watchCollection} from '../collections';
export function watchCollections({chapters, sections, contentElements, files}, {dispatch}) {
watchCollection(chapters, {
name: 'chapters',
attributes: ['id', 'permaId'],
keyAttribute: 'permaId',
includeConfiguration: true,
dispatch
});
watchCollection(sections, {
name: 'sections',
attributes: ['id', 'permaId', 'chapterId'],
keyAttribute: 'permaId',
includeConfiguration: true,
dispatch
});
watchCollection(contentElements, {
name: 'contentElements',
attributes: ['id', 'permaId', 'typeName', 'sectionId'],
keyAttribute: 'permaId',
includeConfiguration: true,
dispatch
});
Object.keys(files).forEach(collectionName => {
watchCollection(files[collectionName], {
name: camelize(collectionName),
attributes: ['id', {permaId: 'perma_id'}, 'width', 'height', 'basename'],
keyAttribute: 'permaId',
includeConfiguration: true,
dispatch
});
});
}
function camelize(snakeCase) {
return snakeCase.replace(/_[a-z]/g, function(match) {
return match[1].toUpperCase();
});
}
| Index all scrolled models by perma id in entry state | Index all scrolled models by perma id in entry state
| JavaScript | mit | codevise/pageflow,tf/pageflow,tf/pageflow,codevise/pageflow,tf/pageflow,tf/pageflow,codevise/pageflow,codevise/pageflow | javascript | ## Code Before:
import {watchCollection} from '../collections';
export function watchCollections({chapters, sections, contentElements, files}, {dispatch}) {
watchCollection(chapters, {
name: 'chapters',
attributes: ['id', 'permaId'],
includeConfiguration: true,
dispatch
});
watchCollection(sections, {
name: 'sections',
attributes: ['id', 'permaId', 'chapterId'],
includeConfiguration: true,
dispatch
});
watchCollection(contentElements, {
name: 'contentElements',
attributes: ['id', 'permaId', 'typeName', 'sectionId'],
keyAttribute: 'permaId',
includeConfiguration: true,
dispatch
});
Object.keys(files).forEach(collectionName => {
watchCollection(files[collectionName], {
name: camelize(collectionName),
attributes: ['id', {permaId: 'perma_id'}, 'width', 'height', 'basename'],
keyAttribute: 'permaId',
includeConfiguration: true,
dispatch
});
});
}
function camelize(snakeCase) {
return snakeCase.replace(/_[a-z]/g, function(match) {
return match[1].toUpperCase();
});
}
## Instruction:
Index all scrolled models by perma id in entry state
## Code After:
import {watchCollection} from '../collections';
export function watchCollections({chapters, sections, contentElements, files}, {dispatch}) {
watchCollection(chapters, {
name: 'chapters',
attributes: ['id', 'permaId'],
keyAttribute: 'permaId',
includeConfiguration: true,
dispatch
});
watchCollection(sections, {
name: 'sections',
attributes: ['id', 'permaId', 'chapterId'],
keyAttribute: 'permaId',
includeConfiguration: true,
dispatch
});
watchCollection(contentElements, {
name: 'contentElements',
attributes: ['id', 'permaId', 'typeName', 'sectionId'],
keyAttribute: 'permaId',
includeConfiguration: true,
dispatch
});
Object.keys(files).forEach(collectionName => {
watchCollection(files[collectionName], {
name: camelize(collectionName),
attributes: ['id', {permaId: 'perma_id'}, 'width', 'height', 'basename'],
keyAttribute: 'permaId',
includeConfiguration: true,
dispatch
});
});
}
function camelize(snakeCase) {
return snakeCase.replace(/_[a-z]/g, function(match) {
return match[1].toUpperCase();
});
}
| import {watchCollection} from '../collections';
export function watchCollections({chapters, sections, contentElements, files}, {dispatch}) {
watchCollection(chapters, {
name: 'chapters',
attributes: ['id', 'permaId'],
+ keyAttribute: 'permaId',
includeConfiguration: true,
dispatch
});
watchCollection(sections, {
name: 'sections',
attributes: ['id', 'permaId', 'chapterId'],
+ keyAttribute: 'permaId',
includeConfiguration: true,
dispatch
});
watchCollection(contentElements, {
name: 'contentElements',
attributes: ['id', 'permaId', 'typeName', 'sectionId'],
keyAttribute: 'permaId',
includeConfiguration: true,
dispatch
});
Object.keys(files).forEach(collectionName => {
watchCollection(files[collectionName], {
name: camelize(collectionName),
attributes: ['id', {permaId: 'perma_id'}, 'width', 'height', 'basename'],
keyAttribute: 'permaId',
includeConfiguration: true,
dispatch
});
});
}
function camelize(snakeCase) {
return snakeCase.replace(/_[a-z]/g, function(match) {
return match[1].toUpperCase();
});
} | 2 | 0.051282 | 2 | 0 |
a9c1dba5daf791f081325eed2a58491737f398b2 | README.rst | README.rst | gir-dylan
=========
These are Dylan bindings for the `gobject-introspection`_ library
(`reference documentation`_).
To build them, you will need to build and install the gobject-introspection
library first. You will also need OpenDylan 2012.1 or later.
This project also contains ``gir-generate-c-ffi`` which takes the
gobject-introspection metadata and generates the appropriate `C-FFI`_
bindings for consumption by the Open Dylan compiler.
.. _gobject-introspection: https://live.gnome.org/GObjectIntrospection
.. _reference documentation: https://developer.gnome.org/gi/stable/
.. _C-FFI: http://opendylan.org/documentation/library-reference/c-ffi/index.html
| gir-dylan
=========
These are Dylan bindings for the `gobject-introspection`_ library
(`reference documentation`_).
To build them, you will need to build and install the gobject-introspection
library first. You will also need OpenDylan 2012.1 or later.
This project also contains ``gir-generate-c-ffi`` which takes the
gobject-introspection metadata and generates the appropriate `C-FFI`_
bindings for consumption by the Open Dylan compiler.
GIR metadata comes in 2 forms, XML files ending in ``.gir`` and binary
files compiled from the XML ending in ``.typelib``. After installing
the `gobject-introspection`_ library, you may need to install additional
packages to have the compiled type libraries available. Compiled type
libraries are typically found in ``/usr/lib/girepository-1.0/`` or
``/usr/local/lib/girepository-1.0/``. GIR files, which we do not
use, are found in ``/usr/share/gir-1.0/`` or ``/usr/local/share/gir-1.0/``.
On Ubuntu, the packages for installing this metadata typically begin
with the prefix ``gir1.2-``.
.. _gobject-introspection: https://live.gnome.org/GObjectIntrospection
.. _reference documentation: https://developer.gnome.org/gi/stable/
.. _C-FFI: http://opendylan.org/documentation/library-reference/c-ffi/index.html
| Add note about how to find the binding metadata. | Add note about how to find the binding metadata.
| reStructuredText | mit | dylan-foundry/gir-dylan | restructuredtext | ## Code Before:
gir-dylan
=========
These are Dylan bindings for the `gobject-introspection`_ library
(`reference documentation`_).
To build them, you will need to build and install the gobject-introspection
library first. You will also need OpenDylan 2012.1 or later.
This project also contains ``gir-generate-c-ffi`` which takes the
gobject-introspection metadata and generates the appropriate `C-FFI`_
bindings for consumption by the Open Dylan compiler.
.. _gobject-introspection: https://live.gnome.org/GObjectIntrospection
.. _reference documentation: https://developer.gnome.org/gi/stable/
.. _C-FFI: http://opendylan.org/documentation/library-reference/c-ffi/index.html
## Instruction:
Add note about how to find the binding metadata.
## Code After:
gir-dylan
=========
These are Dylan bindings for the `gobject-introspection`_ library
(`reference documentation`_).
To build them, you will need to build and install the gobject-introspection
library first. You will also need OpenDylan 2012.1 or later.
This project also contains ``gir-generate-c-ffi`` which takes the
gobject-introspection metadata and generates the appropriate `C-FFI`_
bindings for consumption by the Open Dylan compiler.
GIR metadata comes in 2 forms, XML files ending in ``.gir`` and binary
files compiled from the XML ending in ``.typelib``. After installing
the `gobject-introspection`_ library, you may need to install additional
packages to have the compiled type libraries available. Compiled type
libraries are typically found in ``/usr/lib/girepository-1.0/`` or
``/usr/local/lib/girepository-1.0/``. GIR files, which we do not
use, are found in ``/usr/share/gir-1.0/`` or ``/usr/local/share/gir-1.0/``.
On Ubuntu, the packages for installing this metadata typically begin
with the prefix ``gir1.2-``.
.. _gobject-introspection: https://live.gnome.org/GObjectIntrospection
.. _reference documentation: https://developer.gnome.org/gi/stable/
.. _C-FFI: http://opendylan.org/documentation/library-reference/c-ffi/index.html
| gir-dylan
=========
These are Dylan bindings for the `gobject-introspection`_ library
(`reference documentation`_).
To build them, you will need to build and install the gobject-introspection
library first. You will also need OpenDylan 2012.1 or later.
This project also contains ``gir-generate-c-ffi`` which takes the
gobject-introspection metadata and generates the appropriate `C-FFI`_
bindings for consumption by the Open Dylan compiler.
+ GIR metadata comes in 2 forms, XML files ending in ``.gir`` and binary
+ files compiled from the XML ending in ``.typelib``. After installing
+ the `gobject-introspection`_ library, you may need to install additional
+ packages to have the compiled type libraries available. Compiled type
+ libraries are typically found in ``/usr/lib/girepository-1.0/`` or
+ ``/usr/local/lib/girepository-1.0/``. GIR files, which we do not
+ use, are found in ``/usr/share/gir-1.0/`` or ``/usr/local/share/gir-1.0/``.
+
+ On Ubuntu, the packages for installing this metadata typically begin
+ with the prefix ``gir1.2-``.
+
.. _gobject-introspection: https://live.gnome.org/GObjectIntrospection
.. _reference documentation: https://developer.gnome.org/gi/stable/
.. _C-FFI: http://opendylan.org/documentation/library-reference/c-ffi/index.html | 11 | 0.6875 | 11 | 0 |
24cb44b1d867b9819f98cd1d28076a3f432a582e | nvim/settings/plugin-sideways.vim | nvim/settings/plugin-sideways.vim | " ============================
" Sideways Plugin
" ============================
nnoremap g<C-L> :SidewaysRight<cr>
nnoremap g<C-H> :SidewaysLeft<cr>
" ============================
" Fugitiv Plugin
" ============================
nnoremap <leader>gd :Gdiff<cr>
nnoremap <leader>gs :Gstatus<cr>
nnoremap <leader>gw :Gwrite<cr>
nnoremap <leader>ga :Gadd<cr>
nnoremap <leader>gb :Gblame<cr>
nnoremap <leader>gco :Gcheckout<cr>
nnoremap <leader>gci :Gcommit<cr>
nnoremap <leader>gm :Gmove<cr>
nnoremap <leader>gr :Gremove<cr>
nnoremap <leader>gl :Shell git gl -18<cr>:wincmd \|<cr>
| " ============================
" Sideways Plugin
" ============================
nnoremap g<C-L> :SidewaysRight<cr>
nnoremap g<C-H> :SidewaysLeft<cr>
| Remove leftovers mappings in the wrong place | [vim] Remove leftovers mappings in the wrong place
| VimL | bsd-2-clause | lfilho/dotfiles,lfilho/dotfiles,lfilho/dotfiles,yjlintw/dotfiles,yjlintw/dotfiles,yjlintw/dotfiles | viml | ## Code Before:
" ============================
" Sideways Plugin
" ============================
nnoremap g<C-L> :SidewaysRight<cr>
nnoremap g<C-H> :SidewaysLeft<cr>
" ============================
" Fugitiv Plugin
" ============================
nnoremap <leader>gd :Gdiff<cr>
nnoremap <leader>gs :Gstatus<cr>
nnoremap <leader>gw :Gwrite<cr>
nnoremap <leader>ga :Gadd<cr>
nnoremap <leader>gb :Gblame<cr>
nnoremap <leader>gco :Gcheckout<cr>
nnoremap <leader>gci :Gcommit<cr>
nnoremap <leader>gm :Gmove<cr>
nnoremap <leader>gr :Gremove<cr>
nnoremap <leader>gl :Shell git gl -18<cr>:wincmd \|<cr>
## Instruction:
[vim] Remove leftovers mappings in the wrong place
## Code After:
" ============================
" Sideways Plugin
" ============================
nnoremap g<C-L> :SidewaysRight<cr>
nnoremap g<C-H> :SidewaysLeft<cr>
| " ============================
" Sideways Plugin
" ============================
nnoremap g<C-L> :SidewaysRight<cr>
nnoremap g<C-H> :SidewaysLeft<cr>
-
- " ============================
- " Fugitiv Plugin
- " ============================
- nnoremap <leader>gd :Gdiff<cr>
- nnoremap <leader>gs :Gstatus<cr>
- nnoremap <leader>gw :Gwrite<cr>
- nnoremap <leader>ga :Gadd<cr>
- nnoremap <leader>gb :Gblame<cr>
- nnoremap <leader>gco :Gcheckout<cr>
- nnoremap <leader>gci :Gcommit<cr>
- nnoremap <leader>gm :Gmove<cr>
- nnoremap <leader>gr :Gremove<cr>
- nnoremap <leader>gl :Shell git gl -18<cr>:wincmd \|<cr> | 14 | 0.736842 | 0 | 14 |
dfd9710313352026ac3dcedd993a06241dbb665c | bin/cd_validate_build.sh | bin/cd_validate_build.sh |
set -x
set -e
STAGE=$1
VERSION=$2
INDEX_URL=""
if [ "$STAGE" = "test" ]; then
INDEX_URL="--index-url https://test.pypi.org/simple/"
fi;
mkdir "validate_$STAGE"
cd "validate_$STAGE"
python -m venv "venv_$STAGE"
. "venv_$STAGE/bin/activate"
pip install $INDEX_URL parsenvy=="$VERSION"
TEST_INT=42 python -c "import parsenvy; assert parsenvy.int('TEST_INT') == 42"
deactivate
cd ..
|
set -x
set -e
VERSION=$(poetry version -s)
mkdir "validate_build"
cd "validate_build"
python -m venv "venv"
. "venv/bin/activate"
pip install "../dist/Parsenvy-$VERSION-py3-none-any.whl"
TEST_INT=42 python -c "import parsenvy; assert parsenvy.int('TEST_INT') == 42"
deactivate
cd ..
| Update validation script to use built wheel | Update validation script to use built wheel
| Shell | bsd-3-clause | nkantar/Parsenvy | shell | ## Code Before:
set -x
set -e
STAGE=$1
VERSION=$2
INDEX_URL=""
if [ "$STAGE" = "test" ]; then
INDEX_URL="--index-url https://test.pypi.org/simple/"
fi;
mkdir "validate_$STAGE"
cd "validate_$STAGE"
python -m venv "venv_$STAGE"
. "venv_$STAGE/bin/activate"
pip install $INDEX_URL parsenvy=="$VERSION"
TEST_INT=42 python -c "import parsenvy; assert parsenvy.int('TEST_INT') == 42"
deactivate
cd ..
## Instruction:
Update validation script to use built wheel
## Code After:
set -x
set -e
VERSION=$(poetry version -s)
mkdir "validate_build"
cd "validate_build"
python -m venv "venv"
. "venv/bin/activate"
pip install "../dist/Parsenvy-$VERSION-py3-none-any.whl"
TEST_INT=42 python -c "import parsenvy; assert parsenvy.int('TEST_INT') == 42"
deactivate
cd ..
|
set -x
set -e
+ VERSION=$(poetry version -s)
- STAGE=$1
- VERSION=$2
- INDEX_URL=""
- if [ "$STAGE" = "test" ]; then
- INDEX_URL="--index-url https://test.pypi.org/simple/"
- fi;
-
- mkdir "validate_$STAGE"
? ^^^^^^
+ mkdir "validate_build"
? ^^^^^
- cd "validate_$STAGE"
+ cd "validate_build"
- python -m venv "venv_$STAGE"
? -------
+ python -m venv "venv"
- . "venv_$STAGE/bin/activate"
? -------
+ . "venv/bin/activate"
- pip install $INDEX_URL parsenvy=="$VERSION"
+ pip install "../dist/Parsenvy-$VERSION-py3-none-any.whl"
TEST_INT=42 python -c "import parsenvy; assert parsenvy.int('TEST_INT') == 42"
deactivate
cd .. | 18 | 0.9 | 6 | 12 |
edc8c56c4752aa10473a88ee7d3950b1087b00b7 | components/Header.tsx | components/Header.tsx | import Link from "next/link"
import { Fragment } from "react"
import HorizontalRule from "./HorizontalRule"
const Header = () => (
<Fragment>
<header>
<nav>
<Link href="/">
<a>Home</a>
</Link>
<Link href="/apps">
<a>Apps</a>
</Link>
<Link href="/posts">
<a>Posts</a>
</Link>
</nav>
<div className="horizontal-rule-container">
<HorizontalRule />
</div>
</header>
<style jsx>{`
header {
width: 100vw;
padding-top: 8px;
display: flex;
flex-direction: column;
}
nav {
display: inline-flex;
align-self: center;
overflow-x: scroll;
max-width: 100%;
}
nav a:first-child {
margin-left: var(--content-padding-x);
}
nav a:last-child {
margin-right: var(--content-padding-x);
}
a {
padding: 8px;
font-size: 1.5em;
white-space: nowrap;
}
.horizontal-rule-container {
width: var(--content-width);
margin: 0 auto;
}
`}</style>
</Fragment>
)
export default Header
| import Link from "next/link"
import { Fragment } from "react"
import HorizontalRule from "./HorizontalRule"
const Header = () => (
<Fragment>
<header>
<nav>
<Link href="/">
<a>Home</a>
</Link>
<Link href="/apps">
<a>Apps</a>
</Link>
<Link href="/posts">
<a>Posts</a>
</Link>
<Link href="/open-source">
<a>Open Source</a>
</Link>
</nav>
<div className="horizontal-rule-container">
<HorizontalRule />
</div>
</header>
<style jsx>{`
header {
width: 100vw;
padding-top: 8px;
display: flex;
flex-direction: column;
}
nav {
display: inline-flex;
align-self: center;
overflow-x: scroll;
max-width: 100%;
}
nav a:first-child {
margin-left: var(--content-padding-x);
}
nav a:last-child {
margin-right: var(--content-padding-x);
}
a {
padding: 8px;
font-size: 1.5em;
white-space: nowrap;
}
.horizontal-rule-container {
width: var(--content-width);
margin: 0 auto;
}
`}</style>
</Fragment>
)
export default Header
| Add open source link to header | Add open source link to header
| TypeScript | mit | JosephDuffy/josephduffy.co.uk,JosephDuffy/josephduffy.co.uk,JosephDuffy/josephduffy.co.uk | typescript | ## Code Before:
import Link from "next/link"
import { Fragment } from "react"
import HorizontalRule from "./HorizontalRule"
const Header = () => (
<Fragment>
<header>
<nav>
<Link href="/">
<a>Home</a>
</Link>
<Link href="/apps">
<a>Apps</a>
</Link>
<Link href="/posts">
<a>Posts</a>
</Link>
</nav>
<div className="horizontal-rule-container">
<HorizontalRule />
</div>
</header>
<style jsx>{`
header {
width: 100vw;
padding-top: 8px;
display: flex;
flex-direction: column;
}
nav {
display: inline-flex;
align-self: center;
overflow-x: scroll;
max-width: 100%;
}
nav a:first-child {
margin-left: var(--content-padding-x);
}
nav a:last-child {
margin-right: var(--content-padding-x);
}
a {
padding: 8px;
font-size: 1.5em;
white-space: nowrap;
}
.horizontal-rule-container {
width: var(--content-width);
margin: 0 auto;
}
`}</style>
</Fragment>
)
export default Header
## Instruction:
Add open source link to header
## Code After:
import Link from "next/link"
import { Fragment } from "react"
import HorizontalRule from "./HorizontalRule"
const Header = () => (
<Fragment>
<header>
<nav>
<Link href="/">
<a>Home</a>
</Link>
<Link href="/apps">
<a>Apps</a>
</Link>
<Link href="/posts">
<a>Posts</a>
</Link>
<Link href="/open-source">
<a>Open Source</a>
</Link>
</nav>
<div className="horizontal-rule-container">
<HorizontalRule />
</div>
</header>
<style jsx>{`
header {
width: 100vw;
padding-top: 8px;
display: flex;
flex-direction: column;
}
nav {
display: inline-flex;
align-self: center;
overflow-x: scroll;
max-width: 100%;
}
nav a:first-child {
margin-left: var(--content-padding-x);
}
nav a:last-child {
margin-right: var(--content-padding-x);
}
a {
padding: 8px;
font-size: 1.5em;
white-space: nowrap;
}
.horizontal-rule-container {
width: var(--content-width);
margin: 0 auto;
}
`}</style>
</Fragment>
)
export default Header
| import Link from "next/link"
import { Fragment } from "react"
import HorizontalRule from "./HorizontalRule"
const Header = () => (
<Fragment>
<header>
<nav>
<Link href="/">
<a>Home</a>
</Link>
<Link href="/apps">
<a>Apps</a>
</Link>
<Link href="/posts">
<a>Posts</a>
+ </Link>
+ <Link href="/open-source">
+ <a>Open Source</a>
</Link>
</nav>
<div className="horizontal-rule-container">
<HorizontalRule />
</div>
</header>
<style jsx>{`
header {
width: 100vw;
padding-top: 8px;
display: flex;
flex-direction: column;
}
nav {
display: inline-flex;
align-self: center;
overflow-x: scroll;
max-width: 100%;
}
nav a:first-child {
margin-left: var(--content-padding-x);
}
nav a:last-child {
margin-right: var(--content-padding-x);
}
a {
padding: 8px;
font-size: 1.5em;
white-space: nowrap;
}
.horizontal-rule-container {
width: var(--content-width);
margin: 0 auto;
}
`}</style>
</Fragment>
)
export default Header | 3 | 0.05 | 3 | 0 |
288eb79d03e53f5b3698e43abae2853ae65cf8db | docs/apis/index.rst | docs/apis/index.rst | Application Programming Interfaces (APIs)
=========================================
There are several ways of interacting with the HTCondor system.
Depending on your application and resources, the interfaces to HTCondor
listed below may be useful for your installation. Generally speaking, to
submit jobs from a program or web service, or to monitor HTCondor, the
python bindings are the easiest approach. Chirp provides a convenient
way for a running job to update information about itself to its job ad,
or to remotely read or write files from the executing job on the worker
node to/from the submitting machine.
Older programs which have SOAP bindings may find the HTCondor SOAP
interfaces useful.
If you have developed an interface to HTCondor, please consider sharing
it with the HTCondor community.
.. toctree::
:maxdepth: 2
:glob:
python-bindings/index.rst
chirp
user-job-log-reader-api
command-line-interface
drmaa-api
| Application Programming Interfaces (APIs)
=========================================
There are several ways of interacting with the HTCondor system.
Depending on your application and resources, the interfaces to HTCondor
listed below may be useful for your installation. Generally speaking, to
submit jobs from a program or web service, or to monitor HTCondor, the
python bindings are the easiest approach. Chirp provides a convenient
way for a running job to update information about itself to its job ad,
or to remotely read or write files from the executing job on the worker
node to/from the submitting machine.
If you have developed an interface to HTCondor, please consider sharing
it with the HTCondor community.
.. toctree::
:maxdepth: 2
:glob:
python-bindings/index.rst
chirp
user-job-log-reader-api
command-line-interface
drmaa-api
| Remove SOAP mention from docs | Remove SOAP mention from docs
| reStructuredText | apache-2.0 | htcondor/htcondor,htcondor/htcondor,htcondor/htcondor,htcondor/htcondor,htcondor/htcondor,htcondor/htcondor,htcondor/htcondor,htcondor/htcondor | restructuredtext | ## Code Before:
Application Programming Interfaces (APIs)
=========================================
There are several ways of interacting with the HTCondor system.
Depending on your application and resources, the interfaces to HTCondor
listed below may be useful for your installation. Generally speaking, to
submit jobs from a program or web service, or to monitor HTCondor, the
python bindings are the easiest approach. Chirp provides a convenient
way for a running job to update information about itself to its job ad,
or to remotely read or write files from the executing job on the worker
node to/from the submitting machine.
Older programs which have SOAP bindings may find the HTCondor SOAP
interfaces useful.
If you have developed an interface to HTCondor, please consider sharing
it with the HTCondor community.
.. toctree::
:maxdepth: 2
:glob:
python-bindings/index.rst
chirp
user-job-log-reader-api
command-line-interface
drmaa-api
## Instruction:
Remove SOAP mention from docs
## Code After:
Application Programming Interfaces (APIs)
=========================================
There are several ways of interacting with the HTCondor system.
Depending on your application and resources, the interfaces to HTCondor
listed below may be useful for your installation. Generally speaking, to
submit jobs from a program or web service, or to monitor HTCondor, the
python bindings are the easiest approach. Chirp provides a convenient
way for a running job to update information about itself to its job ad,
or to remotely read or write files from the executing job on the worker
node to/from the submitting machine.
If you have developed an interface to HTCondor, please consider sharing
it with the HTCondor community.
.. toctree::
:maxdepth: 2
:glob:
python-bindings/index.rst
chirp
user-job-log-reader-api
command-line-interface
drmaa-api
| Application Programming Interfaces (APIs)
=========================================
There are several ways of interacting with the HTCondor system.
Depending on your application and resources, the interfaces to HTCondor
listed below may be useful for your installation. Generally speaking, to
submit jobs from a program or web service, or to monitor HTCondor, the
python bindings are the easiest approach. Chirp provides a convenient
way for a running job to update information about itself to its job ad,
or to remotely read or write files from the executing job on the worker
node to/from the submitting machine.
-
- Older programs which have SOAP bindings may find the HTCondor SOAP
- interfaces useful.
If you have developed an interface to HTCondor, please consider sharing
it with the HTCondor community.
.. toctree::
:maxdepth: 2
:glob:
python-bindings/index.rst
chirp
user-job-log-reader-api
command-line-interface
drmaa-api
| 3 | 0.103448 | 0 | 3 |
1f19a855af5ba0aacc083a8857dcb9e863b06976 | tools/owncloud-unlockadmin.sh | tools/owncloud-unlockadmin.sh |
source /etc/mailinabox.conf # load global vars
ADMIN=$(sqlite3 $STORAGE_ROOT/mail/users.sqlite "SELECT email FROM users WHERE privileges = 'admin' ORDER BY id ASC LIMIT 1")
test -z "$1" || ADMIN=$1
echo I am going to unlock admin features for $ADMIN.
echo You can provide another user to unlock as the first argument of this script.
echo
echo WARNING: you could break mail-in-a-box when fiddling around with owncloud\'s admin interface
echo If in doubt, press CTRL-C to cancel.
echo
echo Press enter to continue.
read
sqlite3 $STORAGE_ROOT/owncloud/owncloud.db "INSERT OR IGNORE INTO oc_group_user VALUES ('admin', '$ADMIN')" && echo Done.
|
source /etc/mailinabox.conf # load global vars
ADMIN=$(./mail.py user admins | head -n 1)
test -z "$1" || ADMIN=$1
echo I am going to unlock admin features for $ADMIN.
echo You can provide another user to unlock as the first argument of this script.
echo
echo WARNING: you could break mail-in-a-box when fiddling around with owncloud\'s admin interface
echo If in doubt, press CTRL-C to cancel.
echo
echo Press enter to continue.
read
sqlite3 $STORAGE_ROOT/owncloud/owncloud.db "INSERT OR IGNORE INTO oc_group_user VALUES ('admin', '$ADMIN')" && echo Done.
| Use mail.py to get the admin user | Use mail.py to get the admin user
| Shell | cc0-1.0 | pierreozoux/mailinabox,PortableTech/mailinabox,b-deng/mailinabox,brocktice/mailinabox,Neopallium/mailinabox,choltha/mailinabox,PortableTech/mailinabox,jjz/mailinabox,jmedding/mailinabox,b-deng/mailinabox,jprice/mailinabox,jjz/mailinabox,Neopallium/mailinabox,PortableTech/mailinabox,brocktice/mailinabox,m4rcs/mailinabox,Neopallium/mailinabox,hnk/mailinabox,jjz/mailinabox,m4rcs/mailinabox,ianberinger/mailinabox,d5ve/mailinabox,choltha/mailinabox,b-deng/mailinabox,jmedding/mailinabox,jprice/mailinabox,jmedding/mailinabox,nstanke/mailinabox,hnk/mailinabox,hnk/mailinabox,d5ve/mailinabox,PortableTech/mailinabox,d5ve/mailinabox,brocktice/mailinabox,pierreozoux/mailinabox,mboersma/mailinabox,brocktice/mailinabox,Toilal/mailinabox,pichak/mailinabox,mail-in-a-box/mailinabox,PortableTech/mailinabox,choltha/mailinabox,jmedding/mailinabox,jprice/mailinabox,nstanke/mailinabox,choltha/mailinabox,nstanke/mailinabox,jmedding/mailinabox,b-deng/mailinabox,jprice/mailinabox,pichak/mailinabox,ianberinger/mailinabox,mail-in-a-box/mailinabox,d5ve/mailinabox,ianberinger/mailinabox,pierreozoux/mailinabox,Neopallium/mailinabox,mboersma/mailinabox,pierreozoux/mailinabox,pierreozoux/mailinabox,hnk/mailinabox,mail-in-a-box/mailinabox,mail-in-a-box/mailinabox,nstanke/mailinabox,mboersma/mailinabox,nstanke/mailinabox,Toilal/mailinabox,jprice/mailinabox,mboersma/mailinabox,Neopallium/mailinabox,m4rcs/mailinabox,ianberinger/mailinabox,b-deng/mailinabox,pichak/mailinabox,mboersma/mailinabox,jjz/mailinabox,choltha/mailinabox,m4rcs/mailinabox,Toilal/mailinabox,Toilal/mailinabox,ianberinger/mailinabox,m4rcs/mailinabox,brocktice/mailinabox,pichak/mailinabox,d5ve/mailinabox,jjz/mailinabox | shell | ## Code Before:
source /etc/mailinabox.conf # load global vars
ADMIN=$(sqlite3 $STORAGE_ROOT/mail/users.sqlite "SELECT email FROM users WHERE privileges = 'admin' ORDER BY id ASC LIMIT 1")
test -z "$1" || ADMIN=$1
echo I am going to unlock admin features for $ADMIN.
echo You can provide another user to unlock as the first argument of this script.
echo
echo WARNING: you could break mail-in-a-box when fiddling around with owncloud\'s admin interface
echo If in doubt, press CTRL-C to cancel.
echo
echo Press enter to continue.
read
sqlite3 $STORAGE_ROOT/owncloud/owncloud.db "INSERT OR IGNORE INTO oc_group_user VALUES ('admin', '$ADMIN')" && echo Done.
## Instruction:
Use mail.py to get the admin user
## Code After:
source /etc/mailinabox.conf # load global vars
ADMIN=$(./mail.py user admins | head -n 1)
test -z "$1" || ADMIN=$1
echo I am going to unlock admin features for $ADMIN.
echo You can provide another user to unlock as the first argument of this script.
echo
echo WARNING: you could break mail-in-a-box when fiddling around with owncloud\'s admin interface
echo If in doubt, press CTRL-C to cancel.
echo
echo Press enter to continue.
read
sqlite3 $STORAGE_ROOT/owncloud/owncloud.db "INSERT OR IGNORE INTO oc_group_user VALUES ('admin', '$ADMIN')" && echo Done.
|
source /etc/mailinabox.conf # load global vars
- ADMIN=$(sqlite3 $STORAGE_ROOT/mail/users.sqlite "SELECT email FROM users WHERE privileges = 'admin' ORDER BY id ASC LIMIT 1")
+ ADMIN=$(./mail.py user admins | head -n 1)
test -z "$1" || ADMIN=$1
echo I am going to unlock admin features for $ADMIN.
echo You can provide another user to unlock as the first argument of this script.
echo
echo WARNING: you could break mail-in-a-box when fiddling around with owncloud\'s admin interface
echo If in doubt, press CTRL-C to cancel.
echo
echo Press enter to continue.
read
sqlite3 $STORAGE_ROOT/owncloud/owncloud.db "INSERT OR IGNORE INTO oc_group_user VALUES ('admin', '$ADMIN')" && echo Done. | 2 | 0.125 | 1 | 1 |
6dd1030371c2a7e8b9c69b29042ea72988e9b2ff | core/env/init.lua | core/env/init.lua | require('core.env.strict')
require('core.env.fenv')
require('core.env.table')
| require('core.env.strict')
require('core.env.fenv')
require('core.env.table')
_assert = assert
function assert(v, ...)
if not v then
local args = table.pack(...)
error(table.concat(args, ' ', 1, args.n), 2)
end
return v
end
| Patch assert(). Allow multiple parameters. | Patch assert(). Allow multiple parameters.
| Lua | mit | nehz/slick,nehz/slick,nehz/slick | lua | ## Code Before:
require('core.env.strict')
require('core.env.fenv')
require('core.env.table')
## Instruction:
Patch assert(). Allow multiple parameters.
## Code After:
require('core.env.strict')
require('core.env.fenv')
require('core.env.table')
_assert = assert
function assert(v, ...)
if not v then
local args = table.pack(...)
error(table.concat(args, ' ', 1, args.n), 2)
end
return v
end
| require('core.env.strict')
require('core.env.fenv')
require('core.env.table')
+
+ _assert = assert
+ function assert(v, ...)
+ if not v then
+ local args = table.pack(...)
+ error(table.concat(args, ' ', 1, args.n), 2)
+ end
+ return v
+ end | 9 | 2.25 | 9 | 0 |
840507298a31393dd467b99260655fc4c8864df8 | package.json | package.json | {
"name": "AptoGen-Website",
"version": "0.0.1",
"description": "AptoGen website server",
"main": "server.js",
"repository": "git@github.com:AptoGen/AptoGen-Website.git",
"author": "Karlo Luis Martinez Martos <karlo.luis.m@gmail.com>",
"license": "MIT",
"scripts": {
"dev": "next",
"build": "next build",
"start": "next start"
},
"dependencies": {
"babel-plugin-styled-components": "^1.1.5",
"next": "latest",
"prop-types": "^15.6.0",
"react": "^16.0.0",
"react-addons-css-transition-group": "^15.4.2",
"react-addons-transition-group": "^15.4.2",
"react-dom": "^16.0.0",
"react-md": "^1.0.1",
"styled-components": "^2.1.0"
},
"devDependencies": {
"babel-eslint": "^8.0.1",
"eslint": "^4.9.0",
"eslint-config-airbnb": "^16.1.0",
"eslint-config-airbnb-base": "^12.1.0",
"eslint-plugin-import": "^2.7.0",
"eslint-plugin-react": "^7.4.0",
"storybook": "^1.0.0"
}
}
| {
"name": "AptoGen-Website",
"version": "0.0.1",
"description": "AptoGen website server",
"main": "server.js",
"repository": "git@github.com:AptoGen/AptoGen-Website.git",
"author": "Karlo Luis Martinez Martos <karlo.luis.m@gmail.com>",
"license": "MIT",
"scripts": {
"dev": "next",
"build": "next build",
"start": "next build && next start"
},
"dependencies": {
"babel-plugin-styled-components": "^1.1.5",
"next": "latest",
"prop-types": "^15.6.0",
"react": "^16.0.0",
"react-addons-css-transition-group": "^15.4.2",
"react-addons-transition-group": "^15.4.2",
"react-dom": "^16.0.0",
"react-md": "^1.0.1",
"styled-components": "^2.1.0"
},
"devDependencies": {
"babel-eslint": "^8.0.1",
"eslint": "^4.9.0",
"eslint-config-airbnb": "^16.1.0",
"eslint-config-airbnb-base": "^12.1.0",
"eslint-plugin-import": "^2.7.0",
"eslint-plugin-react": "^7.4.0",
"storybook": "^1.0.0"
}
}
| Add build to start script. | Add build to start script.
| JSON | mit | AptoGen/AptoGen-Website | json | ## Code Before:
{
"name": "AptoGen-Website",
"version": "0.0.1",
"description": "AptoGen website server",
"main": "server.js",
"repository": "git@github.com:AptoGen/AptoGen-Website.git",
"author": "Karlo Luis Martinez Martos <karlo.luis.m@gmail.com>",
"license": "MIT",
"scripts": {
"dev": "next",
"build": "next build",
"start": "next start"
},
"dependencies": {
"babel-plugin-styled-components": "^1.1.5",
"next": "latest",
"prop-types": "^15.6.0",
"react": "^16.0.0",
"react-addons-css-transition-group": "^15.4.2",
"react-addons-transition-group": "^15.4.2",
"react-dom": "^16.0.0",
"react-md": "^1.0.1",
"styled-components": "^2.1.0"
},
"devDependencies": {
"babel-eslint": "^8.0.1",
"eslint": "^4.9.0",
"eslint-config-airbnb": "^16.1.0",
"eslint-config-airbnb-base": "^12.1.0",
"eslint-plugin-import": "^2.7.0",
"eslint-plugin-react": "^7.4.0",
"storybook": "^1.0.0"
}
}
## Instruction:
Add build to start script.
## Code After:
{
"name": "AptoGen-Website",
"version": "0.0.1",
"description": "AptoGen website server",
"main": "server.js",
"repository": "git@github.com:AptoGen/AptoGen-Website.git",
"author": "Karlo Luis Martinez Martos <karlo.luis.m@gmail.com>",
"license": "MIT",
"scripts": {
"dev": "next",
"build": "next build",
"start": "next build && next start"
},
"dependencies": {
"babel-plugin-styled-components": "^1.1.5",
"next": "latest",
"prop-types": "^15.6.0",
"react": "^16.0.0",
"react-addons-css-transition-group": "^15.4.2",
"react-addons-transition-group": "^15.4.2",
"react-dom": "^16.0.0",
"react-md": "^1.0.1",
"styled-components": "^2.1.0"
},
"devDependencies": {
"babel-eslint": "^8.0.1",
"eslint": "^4.9.0",
"eslint-config-airbnb": "^16.1.0",
"eslint-config-airbnb-base": "^12.1.0",
"eslint-plugin-import": "^2.7.0",
"eslint-plugin-react": "^7.4.0",
"storybook": "^1.0.0"
}
}
| {
"name": "AptoGen-Website",
"version": "0.0.1",
"description": "AptoGen website server",
"main": "server.js",
"repository": "git@github.com:AptoGen/AptoGen-Website.git",
"author": "Karlo Luis Martinez Martos <karlo.luis.m@gmail.com>",
"license": "MIT",
"scripts": {
"dev": "next",
"build": "next build",
- "start": "next start"
+ "start": "next build && next start"
? ++++++++++++++
},
"dependencies": {
"babel-plugin-styled-components": "^1.1.5",
"next": "latest",
"prop-types": "^15.6.0",
"react": "^16.0.0",
"react-addons-css-transition-group": "^15.4.2",
"react-addons-transition-group": "^15.4.2",
"react-dom": "^16.0.0",
"react-md": "^1.0.1",
"styled-components": "^2.1.0"
},
"devDependencies": {
"babel-eslint": "^8.0.1",
"eslint": "^4.9.0",
"eslint-config-airbnb": "^16.1.0",
"eslint-config-airbnb-base": "^12.1.0",
"eslint-plugin-import": "^2.7.0",
"eslint-plugin-react": "^7.4.0",
"storybook": "^1.0.0"
}
} | 2 | 0.058824 | 1 | 1 |
971a19379678c7e24ea4a0d127b26b88064db0b8 | examples/hello-world/README.md | examples/hello-world/README.md |
This is a single file demo of cherrytree. It's a very simple static twitter like app. It's simple to keep the code short and just show how to get started.
```
npm install
npm run bundle
npm start
```
|
This is a single file demo of cherrytree. It's a very simple static twitter like app. It's simple to keep the code short and just show how to get started.
```
npm install
npm run bundle
npm start
```
After running `npm start` you can navigate to default [localhost](http://localhost:8000/webpack-dev-server/bundle) to see the example.
| Add step by step explanation | Add step by step explanation
| Markdown | mit | QubitProducts/cherrytree,QubitProducts/cherrytree,nathanboktae/cherrytree,nathanboktae/cherrytree | markdown | ## Code Before:
This is a single file demo of cherrytree. It's a very simple static twitter like app. It's simple to keep the code short and just show how to get started.
```
npm install
npm run bundle
npm start
```
## Instruction:
Add step by step explanation
## Code After:
This is a single file demo of cherrytree. It's a very simple static twitter like app. It's simple to keep the code short and just show how to get started.
```
npm install
npm run bundle
npm start
```
After running `npm start` you can navigate to default [localhost](http://localhost:8000/webpack-dev-server/bundle) to see the example.
|
This is a single file demo of cherrytree. It's a very simple static twitter like app. It's simple to keep the code short and just show how to get started.
```
npm install
npm run bundle
npm start
```
+
+ After running `npm start` you can navigate to default [localhost](http://localhost:8000/webpack-dev-server/bundle) to see the example. | 2 | 0.25 | 2 | 0 |
c1f664146f3f7e3cd65f018f01119425c181dd76 | functions/__fish_history.fish | functions/__fish_history.fish | function __fish_history
if test -e ~/.config/fish/fish_history
tac ~/.config/fish/fish_history | grep "^\- cmd: " | sed 's/\- cmd: //'
else if test -e ~/.local/share/fish/fish_history
tac ~/.local/share/fish/fish_history | grep "^\- cmd: " | sed 's/\- cmd: //'
else
history
end
end
| function __fish_history
set -l QUERY_CMD
if type -q tac
set QUERY_CMD "tac"
else
set QUERY_CMD "tail -r"
end
if test -e ~/.config/fish/fish_history
eval $QUERY_CMD ~/.config/fish/fish_history | grep "^\- cmd: " | sed 's/\- cmd: //'
else if test -e ~/.local/share/fish/fish_history
eval $QUERY_CMD ~/.local/share/fish/fish_history | grep "^\- cmd: " | sed 's/\- cmd: //'
else
history
end
end
| Check if tac exists, if not use tail -r | Check if tac exists, if not use tail -r
| fish | mit | fisherman/fzf | fish | ## Code Before:
function __fish_history
if test -e ~/.config/fish/fish_history
tac ~/.config/fish/fish_history | grep "^\- cmd: " | sed 's/\- cmd: //'
else if test -e ~/.local/share/fish/fish_history
tac ~/.local/share/fish/fish_history | grep "^\- cmd: " | sed 's/\- cmd: //'
else
history
end
end
## Instruction:
Check if tac exists, if not use tail -r
## Code After:
function __fish_history
set -l QUERY_CMD
if type -q tac
set QUERY_CMD "tac"
else
set QUERY_CMD "tail -r"
end
if test -e ~/.config/fish/fish_history
eval $QUERY_CMD ~/.config/fish/fish_history | grep "^\- cmd: " | sed 's/\- cmd: //'
else if test -e ~/.local/share/fish/fish_history
eval $QUERY_CMD ~/.local/share/fish/fish_history | grep "^\- cmd: " | sed 's/\- cmd: //'
else
history
end
end
| function __fish_history
+ set -l QUERY_CMD
+ if type -q tac
+ set QUERY_CMD "tac"
+ else
+ set QUERY_CMD "tail -r"
+ end
+
if test -e ~/.config/fish/fish_history
- tac ~/.config/fish/fish_history | grep "^\- cmd: " | sed 's/\- cmd: //'
? ^ ^
+ eval $QUERY_CMD ~/.config/fish/fish_history | grep "^\- cmd: " | sed 's/\- cmd: //'
? ^^ ^^^^^^^^^^^^
else if test -e ~/.local/share/fish/fish_history
- tac ~/.local/share/fish/fish_history | grep "^\- cmd: " | sed 's/\- cmd: //'
? ^ ^
+ eval $QUERY_CMD ~/.local/share/fish/fish_history | grep "^\- cmd: " | sed 's/\- cmd: //'
? ^^ ^^^^^^^^^^^^
else
history
end
end | 11 | 1.222222 | 9 | 2 |
b0f04ebf6443e9f7d6eb892f2b9b61c04a5165c1 | README.md | README.md | python-gearbox
==============
.. 
.. 
Python library for design of spur and helical gears transmissions
###Install
the project is on *pypi* server for installation use:
*`pip install python-gearbox`*
for manual installation download the last release from:
https://pypi.python.org/pypi/python-gearbox
`python setup.py install`
make shure you have installed all dependencies `jinja2, numpy and scipy`
###Features
- Gear stresses calculation using AGMA-2101 D04 and ISO-6336 standards
- Export 2D and 3D one tooth geometrical models, to *MATLAB/COMSOL* for spur and helical gears
- Export 2D and 3D one tooth geometrical models, to *ABAQUS-CAE* only for spur gears
- Export 2D and 3D one tooth geometrical models, to *ANSYS* only for spur gears
###Next Releases
- Add the export capabilities for helical gears in ANSYS and ABAQUS-CAE softwares
- Optimization of the profile shift modification
###Help and Documentation
The documentation isn't ready yet, see the demo.py file for uses example
###Contribute
if you want to contribute with the project
**e-mail**: efirvida@gmail.com | python-gearbox
==============
.. 
.. 
Python library for design of spur and helical gears transmissions
###Install
the project is on *pypi* server for installation use:
*`pip install python-gearbox`*
for manual installation download the last release from:
https://pypi.python.org/pypi/python-gearbox
`python setup.py install`
make shure you have installed all dependencies `jinja2, numpy and scipy`
###Features
- Gear stresses calculation using AGMA-2101 D04 and ISO-6336 standards
- Optimization of the profile shift modification
- Export 2D and 3D one tooth geometrical models, to *MATLAB/COMSOL* for spur and helical gears
- Export 2D and 3D one tooth geometrical models, to *ABAQUS-CAE* only for spur gears
- Export 2D and 3D one tooth geometrical models, to *ANSYS* only for spur gears
###Next Releases
- Add the export capabilities for helical gears in ANSYS and ABAQUS-CAE softwares
###Help and Documentation
The documentation isn't ready yet, see the demo.py file for uses example
###Contribute
if you want to contribute with the project
**e-mail**: efirvida@gmail.com | Add addendum optimization for bending and pitting, and minor styles fixes | Add addendum optimization for bending and pitting, and minor styles fixes
| Markdown | mit | efirvida/python-gearbox,mjfwest/python-gearbox | markdown | ## Code Before:
python-gearbox
==============
.. 
.. 
Python library for design of spur and helical gears transmissions
###Install
the project is on *pypi* server for installation use:
*`pip install python-gearbox`*
for manual installation download the last release from:
https://pypi.python.org/pypi/python-gearbox
`python setup.py install`
make shure you have installed all dependencies `jinja2, numpy and scipy`
###Features
- Gear stresses calculation using AGMA-2101 D04 and ISO-6336 standards
- Export 2D and 3D one tooth geometrical models, to *MATLAB/COMSOL* for spur and helical gears
- Export 2D and 3D one tooth geometrical models, to *ABAQUS-CAE* only for spur gears
- Export 2D and 3D one tooth geometrical models, to *ANSYS* only for spur gears
###Next Releases
- Add the export capabilities for helical gears in ANSYS and ABAQUS-CAE softwares
- Optimization of the profile shift modification
###Help and Documentation
The documentation isn't ready yet, see the demo.py file for uses example
###Contribute
if you want to contribute with the project
**e-mail**: efirvida@gmail.com
## Instruction:
Add addendum optimization for bending and pitting, and minor styles fixes
## Code After:
python-gearbox
==============
.. 
.. 
Python library for design of spur and helical gears transmissions
###Install
the project is on *pypi* server for installation use:
*`pip install python-gearbox`*
for manual installation download the last release from:
https://pypi.python.org/pypi/python-gearbox
`python setup.py install`
make shure you have installed all dependencies `jinja2, numpy and scipy`
###Features
- Gear stresses calculation using AGMA-2101 D04 and ISO-6336 standards
- Optimization of the profile shift modification
- Export 2D and 3D one tooth geometrical models, to *MATLAB/COMSOL* for spur and helical gears
- Export 2D and 3D one tooth geometrical models, to *ABAQUS-CAE* only for spur gears
- Export 2D and 3D one tooth geometrical models, to *ANSYS* only for spur gears
###Next Releases
- Add the export capabilities for helical gears in ANSYS and ABAQUS-CAE softwares
###Help and Documentation
The documentation isn't ready yet, see the demo.py file for uses example
###Contribute
if you want to contribute with the project
**e-mail**: efirvida@gmail.com | python-gearbox
==============
.. 
.. 
Python library for design of spur and helical gears transmissions
###Install
the project is on *pypi* server for installation use:
*`pip install python-gearbox`*
for manual installation download the last release from:
https://pypi.python.org/pypi/python-gearbox
`python setup.py install`
make shure you have installed all dependencies `jinja2, numpy and scipy`
###Features
- Gear stresses calculation using AGMA-2101 D04 and ISO-6336 standards
+ - Optimization of the profile shift modification
- Export 2D and 3D one tooth geometrical models, to *MATLAB/COMSOL* for spur and helical gears
- Export 2D and 3D one tooth geometrical models, to *ABAQUS-CAE* only for spur gears
- Export 2D and 3D one tooth geometrical models, to *ANSYS* only for spur gears
###Next Releases
- Add the export capabilities for helical gears in ANSYS and ABAQUS-CAE softwares
- - Optimization of the profile shift modification
###Help and Documentation
The documentation isn't ready yet, see the demo.py file for uses example
###Contribute
if you want to contribute with the project
**e-mail**: efirvida@gmail.com | 2 | 0.054054 | 1 | 1 |
055f8c4c6b912026ed51fd20c46bbc37f90eff84 | requirements.txt | requirements.txt | cairocffi==1.0.2; python_version>='3.5'
hiredis==1.0.0
mysqlclient==1.4.2.post1
psycopg2==2.7.7
psycopg2cffi==2.8.1
PyStemmer==1.3.0; python_version<'3.7'
python_axolotl_curve25519==0.4.1.post2
rcssmin==1.0.6
regex==2019.03.12
rjsmin==1.1.0
subprocess32==3.5.2; python_version<'3'
# https://github.com/eleme/thriftpy/issues/333
thriftpy==0.3.9; python_version<'3.7'
Twisted==18.9.0
ujson==1.35
uWSGI==2.0.18
| cairocffi==1.0.2; python_version>='3.5'
hiredis==1.0.0
mysqlclient==1.4.2.post1
psycopg2==2.7.7
psycopg2cffi==2.8.1
PyStemmer==1.3.0; python_version<'3.7'
python_axolotl_curve25519==0.4.1.post2
rcssmin==1.0.6
regex==2019.03.12
rjsmin==1.1.0
subprocess32==3.5.2; python_version<'3'
Twisted==18.9.0
ujson==1.35
uWSGI==2.0.18
| Remove thriftpy (obsolete package, doesn't seem to be used) | Remove thriftpy (obsolete package, doesn't seem to be used)
| Text | bsd-3-clause | praekeltfoundation/debian-wheel-mirror | text | ## Code Before:
cairocffi==1.0.2; python_version>='3.5'
hiredis==1.0.0
mysqlclient==1.4.2.post1
psycopg2==2.7.7
psycopg2cffi==2.8.1
PyStemmer==1.3.0; python_version<'3.7'
python_axolotl_curve25519==0.4.1.post2
rcssmin==1.0.6
regex==2019.03.12
rjsmin==1.1.0
subprocess32==3.5.2; python_version<'3'
# https://github.com/eleme/thriftpy/issues/333
thriftpy==0.3.9; python_version<'3.7'
Twisted==18.9.0
ujson==1.35
uWSGI==2.0.18
## Instruction:
Remove thriftpy (obsolete package, doesn't seem to be used)
## Code After:
cairocffi==1.0.2; python_version>='3.5'
hiredis==1.0.0
mysqlclient==1.4.2.post1
psycopg2==2.7.7
psycopg2cffi==2.8.1
PyStemmer==1.3.0; python_version<'3.7'
python_axolotl_curve25519==0.4.1.post2
rcssmin==1.0.6
regex==2019.03.12
rjsmin==1.1.0
subprocess32==3.5.2; python_version<'3'
Twisted==18.9.0
ujson==1.35
uWSGI==2.0.18
| cairocffi==1.0.2; python_version>='3.5'
hiredis==1.0.0
mysqlclient==1.4.2.post1
psycopg2==2.7.7
psycopg2cffi==2.8.1
PyStemmer==1.3.0; python_version<'3.7'
python_axolotl_curve25519==0.4.1.post2
rcssmin==1.0.6
regex==2019.03.12
rjsmin==1.1.0
subprocess32==3.5.2; python_version<'3'
- # https://github.com/eleme/thriftpy/issues/333
- thriftpy==0.3.9; python_version<'3.7'
Twisted==18.9.0
ujson==1.35
uWSGI==2.0.18 | 2 | 0.125 | 0 | 2 |
64d8427a7b970ec3243179044870ddcf23dc2f09 | README.md | README.md | <img src="https://github.com/Larpon/QtFirebase/blob/master/logo.png" align="right"/>
# QtFirebaseExample
Example Qt app for the QtFirebase project
# Quick start
1. Clone the example app and the [QtFirebase](https://github.com/Larpon/QtFirebase) project
* **Clone example project**
```
cd /path/to/projects
git clone git@github.com:Larpon/QtFirebaseExample.git
```
* **Clone the QtFirebase project**
Clone into the "extensions" folder or into other folder of your choice
```
cd /path/to/projects/QtFirebaseExample/extensions
git clone git@github.com:Larpon/QtFirebase.git
```
2. Follow the instructions in [SETUP.md](https://github.com/Larpon/QtFirebase/blob/master/SETUP.md) on how to setup QtFirebase
| <img src="https://github.com/Larpon/QtFirebase/blob/master/docs/img/logo.png" align="right"/>
# QtFirebaseExample
Example Qt app for the QtFirebase project
# Quick start
1. Clone the example app and the [QtFirebase](https://github.com/Larpon/QtFirebase) project
* **Clone example project**
```
cd /path/to/projects
git clone git@github.com:Larpon/QtFirebaseExample.git
```
* **Clone the QtFirebase project**
Clone into the "extensions" folder or into other folder of your choice
```
cd /path/to/projects/QtFirebaseExample/extensions
git clone git@github.com:Larpon/QtFirebase.git
```
2. Follow the instructions in [SETUP.md](https://github.com/Larpon/QtFirebase/blob/master/docs/SETUP.md) on how to setup QtFirebase
| Update references to moved files in QtFirebase project | Update references to moved files in QtFirebase project
| Markdown | mit | Larpon/QtFirebaseExample,Larpon/QtFirebaseExample | markdown | ## Code Before:
<img src="https://github.com/Larpon/QtFirebase/blob/master/logo.png" align="right"/>
# QtFirebaseExample
Example Qt app for the QtFirebase project
# Quick start
1. Clone the example app and the [QtFirebase](https://github.com/Larpon/QtFirebase) project
* **Clone example project**
```
cd /path/to/projects
git clone git@github.com:Larpon/QtFirebaseExample.git
```
* **Clone the QtFirebase project**
Clone into the "extensions" folder or into other folder of your choice
```
cd /path/to/projects/QtFirebaseExample/extensions
git clone git@github.com:Larpon/QtFirebase.git
```
2. Follow the instructions in [SETUP.md](https://github.com/Larpon/QtFirebase/blob/master/SETUP.md) on how to setup QtFirebase
## Instruction:
Update references to moved files in QtFirebase project
## Code After:
<img src="https://github.com/Larpon/QtFirebase/blob/master/docs/img/logo.png" align="right"/>
# QtFirebaseExample
Example Qt app for the QtFirebase project
# Quick start
1. Clone the example app and the [QtFirebase](https://github.com/Larpon/QtFirebase) project
* **Clone example project**
```
cd /path/to/projects
git clone git@github.com:Larpon/QtFirebaseExample.git
```
* **Clone the QtFirebase project**
Clone into the "extensions" folder or into other folder of your choice
```
cd /path/to/projects/QtFirebaseExample/extensions
git clone git@github.com:Larpon/QtFirebase.git
```
2. Follow the instructions in [SETUP.md](https://github.com/Larpon/QtFirebase/blob/master/docs/SETUP.md) on how to setup QtFirebase
| - <img src="https://github.com/Larpon/QtFirebase/blob/master/logo.png" align="right"/>
+ <img src="https://github.com/Larpon/QtFirebase/blob/master/docs/img/logo.png" align="right"/>
? +++++++++
# QtFirebaseExample
Example Qt app for the QtFirebase project
# Quick start
1. Clone the example app and the [QtFirebase](https://github.com/Larpon/QtFirebase) project
* **Clone example project**
```
cd /path/to/projects
git clone git@github.com:Larpon/QtFirebaseExample.git
```
* **Clone the QtFirebase project**
Clone into the "extensions" folder or into other folder of your choice
```
cd /path/to/projects/QtFirebaseExample/extensions
git clone git@github.com:Larpon/QtFirebase.git
```
- 2. Follow the instructions in [SETUP.md](https://github.com/Larpon/QtFirebase/blob/master/SETUP.md) on how to setup QtFirebase
+ 2. Follow the instructions in [SETUP.md](https://github.com/Larpon/QtFirebase/blob/master/docs/SETUP.md) on how to setup QtFirebase
? +++++
| 4 | 0.153846 | 2 | 2 |
9a2438c1274abf22c21d42a04450bbb9e733deb5 | roles/openshift_provisioners/tasks/install_support.yaml | roles/openshift_provisioners/tasks/install_support.yaml | ---
- name: Check for provisioners project already exists
command: >
{{ openshift.common.client_binary }} --config={{ mktemp.stdout }}/admin.kubeconfig get project {{openshift_provisioners_project}} --no-headers
register: provisioners_project_result
ignore_errors: yes
when: not ansible_check_mode
changed_when: no
- name: Create provisioners project
command: >
{{ openshift.common.admin_binary }} --config={{ mktemp.stdout }}/admin.kubeconfig new-project {{openshift_provisioners_project}}
when: not ansible_check_mode and "not found" in provisioners_project_result.stderr
- name: Create temp directory for all our templates
file: path={{mktemp.stdout}}/templates state=directory mode=0755
changed_when: False
check_mode: no
- include: generate_secrets.yaml
- include: generate_clusterrolebindings.yaml
- include: generate_serviceaccounts.yaml
| ---
- name: Set provisioners project
oc_project:
state: present
kubeconfig: "{{ mktemp.stdout }}/admin.kubeconfig"
name: "{{ openshift_provisioners_project }}"
- name: Create temp directory for all our templates
file: path={{mktemp.stdout}}/templates state=directory mode=0755
changed_when: False
check_mode: no
- include: generate_secrets.yaml
- include: generate_clusterrolebindings.yaml
- include: generate_serviceaccounts.yaml
| Use oc_project to ensure openshift_provisioners_project present | Use oc_project to ensure openshift_provisioners_project present
| YAML | apache-2.0 | sosiouxme/openshift-ansible,anpingli/openshift-ansible,rjhowe/openshift-ansible,rjhowe/openshift-ansible,liggitt/openshift-ansible,maxamillion/openshift-ansible,twiest/openshift-ansible,sdodson/openshift-ansible,aveshagarwal/openshift-ansible,sosiouxme/openshift-ansible,aveshagarwal/openshift-ansible,tagliateller/openshift-ansible,miminar/openshift-ansible,twiest/openshift-ansible,akram/openshift-ansible,markllama/openshift-ansible,wbrefvem/openshift-ansible,mwoodson/openshift-ansible,jwhonce/openshift-ansible,wbrefvem/openshift-ansible,liggitt/openshift-ansible,abutcher/openshift-ansible,bparees/openshift-ansible,rjhowe/openshift-ansible,twiest/openshift-ansible,rhdedgar/openshift-ansible,akubicharm/openshift-ansible,jwhonce/openshift-ansible,akubicharm/openshift-ansible,zhiwliu/openshift-ansible,nak3/openshift-ansible,sdodson/openshift-ansible,ewolinetz/openshift-ansible,sdodson/openshift-ansible,maxamillion/openshift-ansible,gburges/openshift-ansible,gburges/openshift-ansible,liggitt/openshift-ansible,miminar/openshift-ansible,markllama/openshift-ansible,twiest/openshift-ansible,kwoodson/openshift-ansible,jwhonce/openshift-ansible,aveshagarwal/openshift-ansible,maxamillion/openshift-ansible,markllama/openshift-ansible,markllama/openshift-ansible,rhdedgar/openshift-ansible,abutcher/openshift-ansible,sosiouxme/openshift-ansible,zhiwliu/openshift-ansible,sdodson/openshift-ansible,sdodson/openshift-ansible,akubicharm/openshift-ansible,tagliateller/openshift-ansible,aveshagarwal/openshift-ansible,mwoodson/openshift-ansible,markllama/openshift-ansible,wbrefvem/openshift-ansible,openshift/openshift-ansible,aveshagarwal/openshift-ansible,miminar/openshift-ansible,tagliateller/openshift-ansible,liggitt/openshift-ansible,rjhowe/openshift-ansible,zhiwliu/openshift-ansible,akram/openshift-ansible,bparees/openshift-ansible,maxamillion/openshift-ansible,ewolinetz/openshift-ansible,wbrefvem/openshift-ansible,abutcher/openshift-ansible,jwhonce/openshift-ans
ible,miminar/openshift-ansible,tagliateller/openshift-ansible,zhiwliu/openshift-ansible,sosiouxme/openshift-ansible,sosiouxme/openshift-ansible,jwhonce/openshift-ansible,akubicharm/openshift-ansible,akubicharm/openshift-ansible,liggitt/openshift-ansible,abutcher/openshift-ansible,ewolinetz/openshift-ansible,abutcher/openshift-ansible,rjhowe/openshift-ansible,tagliateller/openshift-ansible,kwoodson/openshift-ansible,nak3/openshift-ansible,openshift/openshift-ansible,zhiwliu/openshift-ansible,twiest/openshift-ansible,maxamillion/openshift-ansible,anpingli/openshift-ansible,wbrefvem/openshift-ansible,miminar/openshift-ansible,ewolinetz/openshift-ansible,ewolinetz/openshift-ansible | yaml | ## Code Before:
---
- name: Check for provisioners project already exists
command: >
{{ openshift.common.client_binary }} --config={{ mktemp.stdout }}/admin.kubeconfig get project {{openshift_provisioners_project}} --no-headers
register: provisioners_project_result
ignore_errors: yes
when: not ansible_check_mode
changed_when: no
- name: Create provisioners project
command: >
{{ openshift.common.admin_binary }} --config={{ mktemp.stdout }}/admin.kubeconfig new-project {{openshift_provisioners_project}}
when: not ansible_check_mode and "not found" in provisioners_project_result.stderr
- name: Create temp directory for all our templates
file: path={{mktemp.stdout}}/templates state=directory mode=0755
changed_when: False
check_mode: no
- include: generate_secrets.yaml
- include: generate_clusterrolebindings.yaml
- include: generate_serviceaccounts.yaml
## Instruction:
Use oc_project to ensure openshift_provisioners_project present
## Code After:
---
- name: Set provisioners project
oc_project:
state: present
kubeconfig: "{{ mktemp.stdout }}/admin.kubeconfig"
name: "{{ openshift_provisioners_project }}"
- name: Create temp directory for all our templates
file: path={{mktemp.stdout}}/templates state=directory mode=0755
changed_when: False
check_mode: no
- include: generate_secrets.yaml
- include: generate_clusterrolebindings.yaml
- include: generate_serviceaccounts.yaml
| ---
- - name: Check for provisioners project already exists
- command: >
- {{ openshift.common.client_binary }} --config={{ mktemp.stdout }}/admin.kubeconfig get project {{openshift_provisioners_project}} --no-headers
- register: provisioners_project_result
- ignore_errors: yes
- when: not ansible_check_mode
- changed_when: no
-
- - name: Create provisioners project
? ^^ - -
+ - name: Set provisioners project
? ^
- command: >
- {{ openshift.common.admin_binary }} --config={{ mktemp.stdout }}/admin.kubeconfig new-project {{openshift_provisioners_project}}
- when: not ansible_check_mode and "not found" in provisioners_project_result.stderr
+ oc_project:
+ state: present
+ kubeconfig: "{{ mktemp.stdout }}/admin.kubeconfig"
+ name: "{{ openshift_provisioners_project }}"
- name: Create temp directory for all our templates
file: path={{mktemp.stdout}}/templates state=directory mode=0755
changed_when: False
check_mode: no
- include: generate_secrets.yaml
- include: generate_clusterrolebindings.yaml
- include: generate_serviceaccounts.yaml | 17 | 0.708333 | 5 | 12 |
6fce42f56c4a73c15d17a9cf9d652731f7fdbc7c | application/libraries/core/class.m2datetime.inc.php | application/libraries/core/class.m2datetime.inc.php | <?php
/**
* Date/Time related functions
* @author M2Mobi, Heinz Wiesinger
*/
class M2DateTime
{
/**
* Constructor
*/
public function __construct()
{
}
/**
* Destructor
*/
public function __destruct()
{
}
/**
* Return today's date (YYYY-MM-DD)
* @return String Today's date
*/
public static function today()
{
return date('Y-m-d');
}
/**
* Return tomorrow's date (YYYY-MM-DD)
* @return String Tomorrow's date
*/
public static function tomorrow()
{
return date('Y-m-d', strtotime("+1 day"));
}
/**
* Return the current time (HH:MM:SS)
* @return String current time
*/
public static function now()
{
return strftime("%H:%M:%S", time());
}
}
?> | <?php
/**
* Date/Time related functions
* @author M2Mobi, Heinz Wiesinger
*/
class M2DateTime
{
/**
* Constructor
*/
public function __construct()
{
}
/**
* Destructor
*/
public function __destruct()
{
}
/**
* Return today's date (YYYY-MM-DD)
* @return String Today's date
*/
public static function today()
{
return date('Y-m-d');
}
/**
* Return tomorrow's date (YYYY-MM-DD)
* @return String Tomorrow's date
*/
public static function tomorrow()
{
return date('Y-m-d', strtotime("+1 day"));
}
/**
* Return the current time (HH:MM:SS)
* @return String current time
*/
public static function now()
{
return strftime("%H:%M:%S", time());
}
/**
* Returns a MySQL compatible date definition
* @param Integer $timestamp PHP-like Unix Timestamp
* @return String $date Date as a string
*/
public static function get_date($timestamp)
{
return date('Y-m-d', $timestamp);
}
/**
* Returns a MySQL compatible time definition
* @param Integer $timestamp PHP-like Unix Timestamp
* @return String $time Time as a string
*/
public static function get_time($timestamp)
{
return strftime("%H:%M:%S", $timestamp);
}
}
?> | Add new static methods to M2DateTime class | application: Add new static methods to M2DateTime class
-) get_date() for returning a MySQL compatible date string
-) get_time() for returning a MySQL compatible time string
| PHP | mit | tardypad/lunr,tardypad/lunr,M2Mobi/lunr,tardypad/lunr,pprkut/lunr,pprkut/lunr,pprkut/lunr,M2Mobi/lunr,M2Mobi/lunr | php | ## Code Before:
<?php
/**
* Date/Time related functions
* @author M2Mobi, Heinz Wiesinger
*/
class M2DateTime
{
/**
* Constructor
*/
public function __construct()
{
}
/**
* Destructor
*/
public function __destruct()
{
}
/**
* Return today's date (YYYY-MM-DD)
* @return String Today's date
*/
public static function today()
{
return date('Y-m-d');
}
/**
* Return tomorrow's date (YYYY-MM-DD)
* @return String Tomorrow's date
*/
public static function tomorrow()
{
return date('Y-m-d', strtotime("+1 day"));
}
/**
* Return the current time (HH:MM:SS)
* @return String current time
*/
public static function now()
{
return strftime("%H:%M:%S", time());
}
}
?>
## Instruction:
application: Add new static methods to M2DateTime class
-) get_date() for returning a MySQL compatible date string
-) get_time() for returning a MySQL compatible time string
## Code After:
<?php
/**
* Date/Time related functions
* @author M2Mobi, Heinz Wiesinger
*/
class M2DateTime
{
/**
* Constructor
*/
public function __construct()
{
}
/**
* Destructor
*/
public function __destruct()
{
}
/**
* Return today's date (YYYY-MM-DD)
* @return String Today's date
*/
public static function today()
{
return date('Y-m-d');
}
/**
* Return tomorrow's date (YYYY-MM-DD)
* @return String Tomorrow's date
*/
public static function tomorrow()
{
return date('Y-m-d', strtotime("+1 day"));
}
/**
* Return the current time (HH:MM:SS)
* @return String current time
*/
public static function now()
{
return strftime("%H:%M:%S", time());
}
/**
* Returns a MySQL compatible date definition
* @param Integer $timestamp PHP-like Unix Timestamp
* @return String $date Date as a string
*/
public static function get_date($timestamp)
{
return date('Y-m-d', $timestamp);
}
/**
* Returns a MySQL compatible time definition
* @param Integer $timestamp PHP-like Unix Timestamp
* @return String $time Time as a string
*/
public static function get_time($timestamp)
{
return strftime("%H:%M:%S", $timestamp);
}
}
?> | <?php
/**
* Date/Time related functions
* @author M2Mobi, Heinz Wiesinger
*/
class M2DateTime
{
/**
* Constructor
*/
public function __construct()
{
}
/**
* Destructor
*/
public function __destruct()
{
}
/**
* Return today's date (YYYY-MM-DD)
* @return String Today's date
*/
public static function today()
{
return date('Y-m-d');
}
/**
* Return tomorrow's date (YYYY-MM-DD)
* @return String Tomorrow's date
*/
public static function tomorrow()
{
return date('Y-m-d', strtotime("+1 day"));
}
/**
* Return the current time (HH:MM:SS)
* @return String current time
*/
public static function now()
{
return strftime("%H:%M:%S", time());
}
+ /**
+ * Returns a MySQL compatible date definition
+ * @param Integer $timestamp PHP-like Unix Timestamp
+ * @return String $date Date as a string
+ */
+ public static function get_date($timestamp)
+ {
+ return date('Y-m-d', $timestamp);
+ }
+
+ /**
+ * Returns a MySQL compatible time definition
+ * @param Integer $timestamp PHP-like Unix Timestamp
+ * @return String $time Time as a string
+ */
+ public static function get_time($timestamp)
+ {
+ return strftime("%H:%M:%S", $timestamp);
+ }
}
?> | 19 | 0.345455 | 19 | 0 |
caf245e14421472adb0668e57adf5a3e3ae68424 | scuba/utils.py | scuba/utils.py | try:
from shlex import quote as shell_quote
except ImportError:
from pipes import quote as shell_quote
def format_cmdline(args, maxwidth=80):
def lines():
line = ''
for a in (shell_quote(a) for a in args):
if len(line) + len(a) > maxwidth:
yield line
line = ''
line += ' ' + a
return ' \\\n'.join(lines())[1:]
| try:
from shlex import quote as shell_quote
except ImportError:
from pipes import quote as shell_quote
def format_cmdline(args, maxwidth=80):
'''Format args into a shell-quoted command line.
The result will be wrapped to maxwidth characters where possible,
not breaking a single long argument.
'''
# Leave room for the space and backslash at the end of each line
maxwidth -= 2
def lines():
line = ''
for a in (shell_quote(a) for a in args):
# If adding this argument will make the line too long,
# yield the current line, and start a new one.
if len(line) + len(a) + 1 > maxwidth:
yield line
line = ''
# Append this argument to the current line, separating
# it by a space from the existing arguments.
if line:
line += ' ' + a
else:
line = a
yield line
return ' \\\n'.join(lines())
| Fix missing final line from format_cmdline() | Fix missing final line from format_cmdline()
The previous code was missing 'yield line' after the for loop.
This commit fixes that, as well as the extra space at the beginning
of each line. Normally, we'd use str.join() to avoid such a problem,
but this code is accumulating the line manually, so we can't just join
the args together.
This fixes #41.
| Python | mit | JonathonReinhart/scuba,JonathonReinhart/scuba,JonathonReinhart/scuba | python | ## Code Before:
try:
from shlex import quote as shell_quote
except ImportError:
from pipes import quote as shell_quote
def format_cmdline(args, maxwidth=80):
def lines():
line = ''
for a in (shell_quote(a) for a in args):
if len(line) + len(a) > maxwidth:
yield line
line = ''
line += ' ' + a
return ' \\\n'.join(lines())[1:]
## Instruction:
Fix missing final line from format_cmdline()
The previous code was missing 'yield line' after the for loop.
This commit fixes that, as well as the extra space at the beginning
of each line. Normally, we'd use str.join() to avoid such a problem,
but this code is accumulating the line manually, so we can't just join
the args together.
This fixes #41.
## Code After:
try:
from shlex import quote as shell_quote
except ImportError:
from pipes import quote as shell_quote
def format_cmdline(args, maxwidth=80):
'''Format args into a shell-quoted command line.
The result will be wrapped to maxwidth characters where possible,
not breaking a single long argument.
'''
# Leave room for the space and backslash at the end of each line
maxwidth -= 2
def lines():
line = ''
for a in (shell_quote(a) for a in args):
# If adding this argument will make the line too long,
# yield the current line, and start a new one.
if len(line) + len(a) + 1 > maxwidth:
yield line
line = ''
# Append this argument to the current line, separating
# it by a space from the existing arguments.
if line:
line += ' ' + a
else:
line = a
yield line
return ' \\\n'.join(lines())
| try:
from shlex import quote as shell_quote
except ImportError:
from pipes import quote as shell_quote
+
def format_cmdline(args, maxwidth=80):
+ '''Format args into a shell-quoted command line.
+
+ The result will be wrapped to maxwidth characters where possible,
+ not breaking a single long argument.
+ '''
+
+ # Leave room for the space and backslash at the end of each line
+ maxwidth -= 2
+
def lines():
line = ''
for a in (shell_quote(a) for a in args):
+ # If adding this argument will make the line too long,
+ # yield the current line, and start a new one.
- if len(line) + len(a) > maxwidth:
+ if len(line) + len(a) + 1 > maxwidth:
? ++++
yield line
line = ''
- line += ' ' + a
+ # Append this argument to the current line, separating
+ # it by a space from the existing arguments.
+ if line:
+ line += ' ' + a
+ else:
+ line = a
+
+ yield line
+
- return ' \\\n'.join(lines())[1:]
? ----
+ return ' \\\n'.join(lines()) | 26 | 1.733333 | 23 | 3 |
94d273b6532f34f648870addeb7f7cfdd1f9e70a | features/step_definitions/preview_steps.rb | features/step_definitions/preview_steps.rb | Given(/^I have filled out the address information$/) do
visit "/"
click_link "Submit a New Restroom"
fill_in "restroom[name]", with: "Vancouver restroom"
fill_in "restroom[street]", with: "684 East Hastings"
fill_in "restroom[city]", with: "Vancouver"
fill_in "restroom[state]", with: "British Columbia"
find(:select, "Country").first(:option, "Canada").select_option
end
When(/^I click the preview button$/) do
find_button("Preview").trigger('click')
end
Then(/^I should see the map preview$/) do
expect(page).to have_css("div#mapArea", :visible => true)
end
| Given(/^I have filled out the address information$/) do
visit "/"
click_link "Submit a New Restroom"
fill_in "restroom[name]", with: "Vancouver restroom"
fill_in "restroom[street]", with: "684 East Hastings"
fill_in "restroom[city]", with: "Vancouver"
fill_in "restroom[state]", with: "British Columbia"
find(:select, "Country").first(:option, "Canada").select_option
end
When(/^I click the preview button$/) do
click_button "Preview"
end
Then(/^I should see the map preview$/) do
expect(page).to have_css("div#mapArea", :visible => true)
end
| Simplify statement in cucumber test steps | Simplify statement in cucumber test steps
Use click_button instead of find_button().trigger('click')
| Ruby | agpl-3.0 | RefugeRestrooms/refugerestrooms,RefugeRestrooms/refugerestrooms,RefugeRestrooms/refugerestrooms,RefugeRestrooms/refugerestrooms | ruby | ## Code Before:
Given(/^I have filled out the address information$/) do
visit "/"
click_link "Submit a New Restroom"
fill_in "restroom[name]", with: "Vancouver restroom"
fill_in "restroom[street]", with: "684 East Hastings"
fill_in "restroom[city]", with: "Vancouver"
fill_in "restroom[state]", with: "British Columbia"
find(:select, "Country").first(:option, "Canada").select_option
end
When(/^I click the preview button$/) do
find_button("Preview").trigger('click')
end
Then(/^I should see the map preview$/) do
expect(page).to have_css("div#mapArea", :visible => true)
end
## Instruction:
Simplify statement in cucumber test steps
Use click_button instead of find_button().trigger('click')
## Code After:
Given(/^I have filled out the address information$/) do
visit "/"
click_link "Submit a New Restroom"
fill_in "restroom[name]", with: "Vancouver restroom"
fill_in "restroom[street]", with: "684 East Hastings"
fill_in "restroom[city]", with: "Vancouver"
fill_in "restroom[state]", with: "British Columbia"
find(:select, "Country").first(:option, "Canada").select_option
end
When(/^I click the preview button$/) do
click_button "Preview"
end
Then(/^I should see the map preview$/) do
expect(page).to have_css("div#mapArea", :visible => true)
end
| Given(/^I have filled out the address information$/) do
visit "/"
click_link "Submit a New Restroom"
fill_in "restroom[name]", with: "Vancouver restroom"
fill_in "restroom[street]", with: "684 East Hastings"
fill_in "restroom[city]", with: "Vancouver"
fill_in "restroom[state]", with: "British Columbia"
find(:select, "Country").first(:option, "Canada").select_option
end
When(/^I click the preview button$/) do
- find_button("Preview").trigger('click')
+ click_button "Preview"
end
Then(/^I should see the map preview$/) do
expect(page).to have_css("div#mapArea", :visible => true)
end | 2 | 0.117647 | 1 | 1 |
71db89cad06dc0aa81e0a7178712e8beb7e7cb01 | turbustat/tests/test_cramer.py | turbustat/tests/test_cramer.py |
'''
Test functions for Cramer
'''
from unittest import TestCase
import numpy as np
import numpy.testing as npt
from ..statistics import Cramer_Distance
from ._testing_data import \
dataset1, dataset2, computed_data, computed_distances
class testCramer(TestCase):
def test_cramer(self):
self.tester = \
Cramer_Distance(dataset1["cube"],
dataset2["cube"],
noise_value1=0.1,
noise_value2=0.1).distance_metric(normalize=False)
npt.assert_allclose(self.tester.data_matrix1,
computed_data["cramer_val"])
npt.assert_almost_equal(self.tester.distance,
computed_distances['cramer_distance'])
def test_cramer_spatial_diff(self):
small_data = dataset1["cube"][0][:, :26, :26]
self.tester2 = Cramer_Distance(small_data, dataset2["cube"])
self.tester2.distance_metric(normalize=False)
self.tester3 = Cramer_Distance(dataset2["cube"], small_data)
self.tester3.distance_metric(normalize=False)
npt.assert_almost_equal(self.tester2.distance, self.tester3.distance)
|
'''
Test functions for Cramer
'''
import numpy.testing as npt
from ..statistics import Cramer_Distance
from ._testing_data import \
dataset1, dataset2, computed_data, computed_distances
def test_cramer():
tester = \
Cramer_Distance(dataset1["cube"],
dataset2["cube"],
noise_value1=0.1,
noise_value2=0.1).distance_metric(normalize=False)
npt.assert_allclose(tester.data_matrix1,
computed_data["cramer_val"])
npt.assert_almost_equal(tester.distance,
computed_distances['cramer_distance'])
def test_cramer_spatial_diff():
small_data = dataset1["cube"][0][:, :26, :26]
tester2 = Cramer_Distance(small_data, dataset2["cube"])
tester2.distance_metric(normalize=False)
tester3 = Cramer_Distance(dataset2["cube"], small_data)
tester3.distance_metric(normalize=False)
npt.assert_almost_equal(tester2.distance, tester3.distance)
| Remove importing UnitCase from Cramer tests | Remove importing UnitCase from Cramer tests
| Python | mit | Astroua/TurbuStat,e-koch/TurbuStat | python | ## Code Before:
'''
Test functions for Cramer
'''
from unittest import TestCase
import numpy as np
import numpy.testing as npt
from ..statistics import Cramer_Distance
from ._testing_data import \
dataset1, dataset2, computed_data, computed_distances
class testCramer(TestCase):
def test_cramer(self):
self.tester = \
Cramer_Distance(dataset1["cube"],
dataset2["cube"],
noise_value1=0.1,
noise_value2=0.1).distance_metric(normalize=False)
npt.assert_allclose(self.tester.data_matrix1,
computed_data["cramer_val"])
npt.assert_almost_equal(self.tester.distance,
computed_distances['cramer_distance'])
def test_cramer_spatial_diff(self):
small_data = dataset1["cube"][0][:, :26, :26]
self.tester2 = Cramer_Distance(small_data, dataset2["cube"])
self.tester2.distance_metric(normalize=False)
self.tester3 = Cramer_Distance(dataset2["cube"], small_data)
self.tester3.distance_metric(normalize=False)
npt.assert_almost_equal(self.tester2.distance, self.tester3.distance)
## Instruction:
Remove importing UnitCase from Cramer tests
## Code After:
'''
Test functions for Cramer
'''
import numpy.testing as npt
from ..statistics import Cramer_Distance
from ._testing_data import \
dataset1, dataset2, computed_data, computed_distances
def test_cramer():
tester = \
Cramer_Distance(dataset1["cube"],
dataset2["cube"],
noise_value1=0.1,
noise_value2=0.1).distance_metric(normalize=False)
npt.assert_allclose(tester.data_matrix1,
computed_data["cramer_val"])
npt.assert_almost_equal(tester.distance,
computed_distances['cramer_distance'])
def test_cramer_spatial_diff():
small_data = dataset1["cube"][0][:, :26, :26]
tester2 = Cramer_Distance(small_data, dataset2["cube"])
tester2.distance_metric(normalize=False)
tester3 = Cramer_Distance(dataset2["cube"], small_data)
tester3.distance_metric(normalize=False)
npt.assert_almost_equal(tester2.distance, tester3.distance)
|
'''
Test functions for Cramer
'''
- from unittest import TestCase
-
- import numpy as np
import numpy.testing as npt
from ..statistics import Cramer_Distance
from ._testing_data import \
dataset1, dataset2, computed_data, computed_distances
- class testCramer(TestCase):
+ def test_cramer():
+ tester = \
+ Cramer_Distance(dataset1["cube"],
+ dataset2["cube"],
+ noise_value1=0.1,
+ noise_value2=0.1).distance_metric(normalize=False)
+ npt.assert_allclose(tester.data_matrix1,
+ computed_data["cramer_val"])
+ npt.assert_almost_equal(tester.distance,
+ computed_distances['cramer_distance'])
- def test_cramer(self):
- self.tester = \
- Cramer_Distance(dataset1["cube"],
- dataset2["cube"],
- noise_value1=0.1,
- noise_value2=0.1).distance_metric(normalize=False)
- npt.assert_allclose(self.tester.data_matrix1,
- computed_data["cramer_val"])
- npt.assert_almost_equal(self.tester.distance,
- computed_distances['cramer_distance'])
- def test_cramer_spatial_diff(self):
? ---- ----
+ def test_cramer_spatial_diff():
- small_data = dataset1["cube"][0][:, :26, :26]
? ----
+ small_data = dataset1["cube"][0][:, :26, :26]
- self.tester2 = Cramer_Distance(small_data, dataset2["cube"])
? ---------
+ tester2 = Cramer_Distance(small_data, dataset2["cube"])
- self.tester2.distance_metric(normalize=False)
? ---------
+ tester2.distance_metric(normalize=False)
- self.tester3 = Cramer_Distance(dataset2["cube"], small_data)
? ---------
+ tester3 = Cramer_Distance(dataset2["cube"], small_data)
- self.tester3.distance_metric(normalize=False)
? ---------
+ tester3.distance_metric(normalize=False)
- npt.assert_almost_equal(self.tester2.distance, self.tester3.distance)
? ---- ----- -----
+ npt.assert_almost_equal(tester2.distance, tester3.distance) | 38 | 0.95 | 17 | 21 |
19c0262faf475e7fa2e04d6028e8e6143a43adc2 | docker-entrypoint.sh | docker-entrypoint.sh | set -e
NAME='elasticsearch'
if [ "$1" = "$NAME" ]; then
MAX_OPEN_FILES=65535
# Those are just defaults, they can be overriden with -Des.config=...
CONF_DIR=/etc/$NAME
CONF_FILE=$CONF_DIR/elasticsearch.yml
WORK_DIR=/tmp/$NAME
OPTS="--default.config=$CONF_FILE --default.path.home=$ES_HOME --default.path.logs=$ES_LOGS --default.path.data=$ES_DATA --default.path.work=$WORK_DIR --default.path.conf=$CONF_DIR"
mkdir -p "$ES_LOGS" "$ES_DATA" "$WORK_DIR" && chown -R "$ES_USER":"$ES_GROUP" "$ES_LOGS" "$ES_DATA" "$WORK_DIR"
if [ -n "$MAX_OPEN_FILES" ]; then
ulimit -n $MAX_OPEN_FILES
fi
shift
exec gosu "$ES_USER:$ES_GROUP" "$NAME" $OPTS "$@"
fi
exec "$@"
| set -e
NAME='elasticsearch'
if [ "$1" = "$NAME" ]; then
# Those are just defaults, they can be overriden with -Des.config=...
CONF_DIR=/etc/$NAME
CONF_FILE=$CONF_DIR/elasticsearch.yml
WORK_DIR=/tmp/$NAME
OPTS="--default.config=$CONF_FILE --default.path.home=$ES_HOME --default.path.logs=$ES_LOGS --default.path.data=$ES_DATA --default.path.work=$WORK_DIR --default.path.conf=$CONF_DIR"
mkdir -p "$ES_LOGS" "$ES_DATA" "$WORK_DIR" && chown -R "$ES_USER":"$ES_GROUP" "$ES_LOGS" "$ES_DATA" "$WORK_DIR"
shift
exec gosu "$ES_USER:$ES_GROUP" "$NAME" $OPTS "$@"
fi
exec "$@"
| Remove ulimit settings from entrypoint | Remove ulimit settings from entrypoint
Raising ulimits inside the container like this won't work. Setting them from
outside the container will work once https://github.com/docker/docker/pull/9437
is fixed.
| Shell | mit | krallin/docker-elasticsearch | shell | ## Code Before:
set -e
NAME='elasticsearch'
if [ "$1" = "$NAME" ]; then
MAX_OPEN_FILES=65535
# Those are just defaults, they can be overriden with -Des.config=...
CONF_DIR=/etc/$NAME
CONF_FILE=$CONF_DIR/elasticsearch.yml
WORK_DIR=/tmp/$NAME
OPTS="--default.config=$CONF_FILE --default.path.home=$ES_HOME --default.path.logs=$ES_LOGS --default.path.data=$ES_DATA --default.path.work=$WORK_DIR --default.path.conf=$CONF_DIR"
mkdir -p "$ES_LOGS" "$ES_DATA" "$WORK_DIR" && chown -R "$ES_USER":"$ES_GROUP" "$ES_LOGS" "$ES_DATA" "$WORK_DIR"
if [ -n "$MAX_OPEN_FILES" ]; then
ulimit -n $MAX_OPEN_FILES
fi
shift
exec gosu "$ES_USER:$ES_GROUP" "$NAME" $OPTS "$@"
fi
exec "$@"
## Instruction:
Remove ulimit settings from entrypoint
Raising ulimits inside the container like this won't work. Setting them from
outside the container will work once https://github.com/docker/docker/pull/9437
is fixed.
## Code After:
set -e
NAME='elasticsearch'
if [ "$1" = "$NAME" ]; then
# Those are just defaults, they can be overriden with -Des.config=...
CONF_DIR=/etc/$NAME
CONF_FILE=$CONF_DIR/elasticsearch.yml
WORK_DIR=/tmp/$NAME
OPTS="--default.config=$CONF_FILE --default.path.home=$ES_HOME --default.path.logs=$ES_LOGS --default.path.data=$ES_DATA --default.path.work=$WORK_DIR --default.path.conf=$CONF_DIR"
mkdir -p "$ES_LOGS" "$ES_DATA" "$WORK_DIR" && chown -R "$ES_USER":"$ES_GROUP" "$ES_LOGS" "$ES_DATA" "$WORK_DIR"
shift
exec gosu "$ES_USER:$ES_GROUP" "$NAME" $OPTS "$@"
fi
exec "$@"
| set -e
NAME='elasticsearch'
if [ "$1" = "$NAME" ]; then
- MAX_OPEN_FILES=65535
-
# Those are just defaults, they can be overriden with -Des.config=...
CONF_DIR=/etc/$NAME
CONF_FILE=$CONF_DIR/elasticsearch.yml
WORK_DIR=/tmp/$NAME
OPTS="--default.config=$CONF_FILE --default.path.home=$ES_HOME --default.path.logs=$ES_LOGS --default.path.data=$ES_DATA --default.path.work=$WORK_DIR --default.path.conf=$CONF_DIR"
mkdir -p "$ES_LOGS" "$ES_DATA" "$WORK_DIR" && chown -R "$ES_USER":"$ES_GROUP" "$ES_LOGS" "$ES_DATA" "$WORK_DIR"
- if [ -n "$MAX_OPEN_FILES" ]; then
- ulimit -n $MAX_OPEN_FILES
- fi
-
shift
exec gosu "$ES_USER:$ES_GROUP" "$NAME" $OPTS "$@"
fi
exec "$@" | 6 | 0.24 | 0 | 6 |
4066892bbe34205fcf0a92dd86096fe849ea017a | app/models/account.js | app/models/account.js | import DS from 'ember-data';
export default DS.Model.extend({
email: DS.attr(),
following: DS.attr(),
favorites: DS.attr(),
pins: DS.attr()
});
| import DS from 'ember-data';
export default DS.Model.extend({
email: DS.attr(),
following: DS.attr('array', {defaultValue: []}),
favorites: DS.attr('array', {defaultValue: []}),
pins: DS.attr('array', {defaultValue: []})
});
| Set default values for Account | Set default values for Account
| JavaScript | mit | stevenwu/hacker-news | javascript | ## Code Before:
import DS from 'ember-data';
export default DS.Model.extend({
email: DS.attr(),
following: DS.attr(),
favorites: DS.attr(),
pins: DS.attr()
});
## Instruction:
Set default values for Account
## Code After:
import DS from 'ember-data';
export default DS.Model.extend({
email: DS.attr(),
following: DS.attr('array', {defaultValue: []}),
favorites: DS.attr('array', {defaultValue: []}),
pins: DS.attr('array', {defaultValue: []})
});
| import DS from 'ember-data';
export default DS.Model.extend({
email: DS.attr(),
- following: DS.attr(),
- favorites: DS.attr(),
- pins: DS.attr()
+ following: DS.attr('array', {defaultValue: []}),
+ favorites: DS.attr('array', {defaultValue: []}),
+ pins: DS.attr('array', {defaultValue: []})
}); | 6 | 0.75 | 3 | 3 |
80da0d5330edf69a5ec1e3dc553264c5668cd146 | app/views/admin/enterprises/_ng_form.html.haml | app/views/admin/enterprises/_ng_form.html.haml | -# Not all inputs are ng inputs, they don't make the ng-form dirty on change.
-# ng-change is only valid for inputs, not for a form.
-# So we use onchange and have to get the scope to access the ng controller
= form_for [main_app, :admin, @enterprise], html: { name: "enterprise_form",
"ng-controller" => 'enterpriseCtrl',
"ng-cloak" => true } do |f|
%save-bar{ dirty: "enterprise_form.$dirty", persist: "true" }
%input.red{ type: "button", value: t(:update), ng: { click: "submit()", disabled: "!enterprise_form.$dirty" } }
%input{ type: "button", ng: { value: "enterprise_form.$dirty ? '#{t(:cancel)}' : '#{t(:close)}'", click: "cancel('#{main_app.admin_enterprises_path}')" } }
.row
.sixteen.columns.alpha
.four.columns.alpha
= render 'admin/shared/side_menu'
.one.column
.eleven.columns.omega.fullwidth_inputs
= render 'form', f: f
| -# Not all inputs are ng inputs, they don't make the ng-form dirty on change.
-# ng-change is only valid for inputs, not for a form.
-# So we use onchange and have to get the scope to access the ng controller
= form_for [main_app, :admin, @enterprise], html: { name: "enterprise_form",
onchange: "angular.element(enterprise_form).scope().setFormDirty()",
"ng-controller" => 'enterpriseCtrl',
"ng-cloak" => true } do |f|
%save-bar{ dirty: "enterprise_form.$dirty", persist: "true" }
%input.red{ type: "button", value: t(:update), ng: { click: "submit()", disabled: "!enterprise_form.$dirty" } }
%input{ type: "button", ng: { value: "enterprise_form.$dirty ? '#{t(:cancel)}' : '#{t(:close)}'", click: "cancel('#{main_app.admin_enterprises_path}')" } }
.row
.sixteen.columns.alpha
.four.columns.alpha
= render 'admin/shared/side_menu'
.one.column
.eleven.columns.omega.fullwidth_inputs
= render 'form', f: f
| Set enterprise form dirty on any change | Set enterprise form dirty on any change
Commit 4953c69123059e44f64d0a2ea24f001cd6b2dff4 introduced a bug that
the save button is not activated when changing enterprise fields.
https://github.com/openfoodfoundation/openfoodnetwork/issues/2195
This is reverting the change and fixes #2195.
Sadly, it re-opens
https://github.com/openfoodfoundation/openfoodnetwork/issues/1216.
But that one is not as severe as the current bug.
| Haml | agpl-3.0 | mkllnk/openfoodnetwork,Matt-Yorkley/openfoodnetwork,Matt-Yorkley/openfoodnetwork,oeoeaio/openfoodnetwork,lin-d-hop/openfoodnetwork,openfoodfoundation/openfoodnetwork,lin-d-hop/openfoodnetwork,oeoeaio/openfoodnetwork,openfoodfoundation/openfoodnetwork,Matt-Yorkley/openfoodnetwork,oeoeaio/openfoodnetwork,mkllnk/openfoodnetwork,mkllnk/openfoodnetwork,Matt-Yorkley/openfoodnetwork,lin-d-hop/openfoodnetwork,mkllnk/openfoodnetwork,openfoodfoundation/openfoodnetwork,oeoeaio/openfoodnetwork,openfoodfoundation/openfoodnetwork,lin-d-hop/openfoodnetwork | haml | ## Code Before:
-# Not all inputs are ng inputs, they don't make the ng-form dirty on change.
-# ng-change is only valid for inputs, not for a form.
-# So we use onchange and have to get the scope to access the ng controller
= form_for [main_app, :admin, @enterprise], html: { name: "enterprise_form",
"ng-controller" => 'enterpriseCtrl',
"ng-cloak" => true } do |f|
%save-bar{ dirty: "enterprise_form.$dirty", persist: "true" }
%input.red{ type: "button", value: t(:update), ng: { click: "submit()", disabled: "!enterprise_form.$dirty" } }
%input{ type: "button", ng: { value: "enterprise_form.$dirty ? '#{t(:cancel)}' : '#{t(:close)}'", click: "cancel('#{main_app.admin_enterprises_path}')" } }
.row
.sixteen.columns.alpha
.four.columns.alpha
= render 'admin/shared/side_menu'
.one.column
.eleven.columns.omega.fullwidth_inputs
= render 'form', f: f
## Instruction:
Set enterprise form dirty on any change
Commit 4953c69123059e44f64d0a2ea24f001cd6b2dff4 introduced a bug that
the save button is not activated when changing enterprise fields.
https://github.com/openfoodfoundation/openfoodnetwork/issues/2195
This is reverting the change and fixes #2195.
Sadly, it re-opens
https://github.com/openfoodfoundation/openfoodnetwork/issues/1216.
But that one is not as severe as the current bug.
## Code After:
-# Not all inputs are ng inputs, they don't make the ng-form dirty on change.
-# ng-change is only valid for inputs, not for a form.
-# So we use onchange and have to get the scope to access the ng controller
= form_for [main_app, :admin, @enterprise], html: { name: "enterprise_form",
onchange: "angular.element(enterprise_form).scope().setFormDirty()",
"ng-controller" => 'enterpriseCtrl',
"ng-cloak" => true } do |f|
%save-bar{ dirty: "enterprise_form.$dirty", persist: "true" }
%input.red{ type: "button", value: t(:update), ng: { click: "submit()", disabled: "!enterprise_form.$dirty" } }
%input{ type: "button", ng: { value: "enterprise_form.$dirty ? '#{t(:cancel)}' : '#{t(:close)}'", click: "cancel('#{main_app.admin_enterprises_path}')" } }
.row
.sixteen.columns.alpha
.four.columns.alpha
= render 'admin/shared/side_menu'
.one.column
.eleven.columns.omega.fullwidth_inputs
= render 'form', f: f
| -# Not all inputs are ng inputs, they don't make the ng-form dirty on change.
-# ng-change is only valid for inputs, not for a form.
-# So we use onchange and have to get the scope to access the ng controller
= form_for [main_app, :admin, @enterprise], html: { name: "enterprise_form",
+ onchange: "angular.element(enterprise_form).scope().setFormDirty()",
"ng-controller" => 'enterpriseCtrl',
"ng-cloak" => true } do |f|
%save-bar{ dirty: "enterprise_form.$dirty", persist: "true" }
%input.red{ type: "button", value: t(:update), ng: { click: "submit()", disabled: "!enterprise_form.$dirty" } }
%input{ type: "button", ng: { value: "enterprise_form.$dirty ? '#{t(:cancel)}' : '#{t(:close)}'", click: "cancel('#{main_app.admin_enterprises_path}')" } }
.row
.sixteen.columns.alpha
.four.columns.alpha
= render 'admin/shared/side_menu'
.one.column
.eleven.columns.omega.fullwidth_inputs
= render 'form', f: f | 1 | 0.055556 | 1 | 0 |
ac40e63c4ab3bf9d1802c35b0f7f46072c3cbd8a | mx.mozart-graal/native-image.properties | mx.mozart-graal/native-image.properties | Args = -H:MaxRuntimeCompileMethods=350 \
-H:ReflectionConfigurationResources=org/mozartoz/truffle/reflection.json
# -H:+RuntimeAssertions
JavaArgs = -Dpolyglot.engine.PreinitializeContexts=oz
| Args = -H:MaxRuntimeCompileMethods=350 \
-H:ReflectionConfigurationResources=org/mozartoz/truffle/reflection.json \
--initialize-at-build-time
# -H:+RuntimeAssertions
JavaArgs = -Dpolyglot.engine.PreinitializeContexts=oz
| Make sure to initialize all static initializers at image build time | Make sure to initialize all static initializers at image build time
| INI | bsd-2-clause | eregon/mozart-graal,eregon/mozart-graal,eregon/mozart-graal,eregon/mozart-graal,eregon/mozart-graal,eregon/mozart-graal | ini | ## Code Before:
Args = -H:MaxRuntimeCompileMethods=350 \
-H:ReflectionConfigurationResources=org/mozartoz/truffle/reflection.json
# -H:+RuntimeAssertions
JavaArgs = -Dpolyglot.engine.PreinitializeContexts=oz
## Instruction:
Make sure to initialize all static initializers at image build time
## Code After:
Args = -H:MaxRuntimeCompileMethods=350 \
-H:ReflectionConfigurationResources=org/mozartoz/truffle/reflection.json \
--initialize-at-build-time
# -H:+RuntimeAssertions
JavaArgs = -Dpolyglot.engine.PreinitializeContexts=oz
| Args = -H:MaxRuntimeCompileMethods=350 \
- -H:ReflectionConfigurationResources=org/mozartoz/truffle/reflection.json
+ -H:ReflectionConfigurationResources=org/mozartoz/truffle/reflection.json \
? ++
+ --initialize-at-build-time
# -H:+RuntimeAssertions
JavaArgs = -Dpolyglot.engine.PreinitializeContexts=oz | 3 | 0.5 | 2 | 1 |
9a7bb324fcfe0ef575f9d3a16a2e543f3d75892e | package.js | package.js | Package.describe({
name: 'dburles:two-factor',
version: '1.3.0',
summary: 'Two-factor authentication for accounts-password',
git: 'https://github.com/dburles/meteor-two-factor.git',
documentation: 'README.md',
});
Package.onUse(function(api) {
api.versionsFrom('1.2.1');
api.use(['ecmascript', 'check']);
api.use('reactive-dict', 'client');
api.use('accounts-password', ['client', 'server']);
api.addFiles('common.js');
api.addFiles('client.js', 'client');
api.addFiles('server.js', 'server');
api.export('twoFactor');
});
Package.onTest(function(api) {
api.use('ecmascript');
api.use('tinytest');
api.use('dburles:two-factor');
api.addFiles('tests.js');
});
| Package.describe({
name: 'dburles:two-factor',
version: '1.3.1',
summary: 'Two-factor authentication for accounts-password',
git: 'https://github.com/dburles/meteor-two-factor.git',
documentation: 'README.md',
});
Package.onUse(function(api) {
api.versionsFrom(['1.2.1', '2.3']);
api.use(['ecmascript', 'check']);
api.use('reactive-dict', 'client');
api.use('accounts-password', ['client', 'server']);
api.addFiles('common.js');
api.addFiles('client.js', 'client');
api.addFiles('server.js', 'server');
api.export('twoFactor');
});
Package.onTest(function(api) {
api.use('ecmascript');
api.use('tinytest');
api.use('dburles:two-factor');
api.addFiles('tests.js');
});
| Make compatible with Meteor 2.3+ | Make compatible with Meteor 2.3+ | JavaScript | mit | dburles/meteor-two-factor | javascript | ## Code Before:
Package.describe({
name: 'dburles:two-factor',
version: '1.3.0',
summary: 'Two-factor authentication for accounts-password',
git: 'https://github.com/dburles/meteor-two-factor.git',
documentation: 'README.md',
});
Package.onUse(function(api) {
api.versionsFrom('1.2.1');
api.use(['ecmascript', 'check']);
api.use('reactive-dict', 'client');
api.use('accounts-password', ['client', 'server']);
api.addFiles('common.js');
api.addFiles('client.js', 'client');
api.addFiles('server.js', 'server');
api.export('twoFactor');
});
Package.onTest(function(api) {
api.use('ecmascript');
api.use('tinytest');
api.use('dburles:two-factor');
api.addFiles('tests.js');
});
## Instruction:
Make compatible with Meteor 2.3+
## Code After:
Package.describe({
name: 'dburles:two-factor',
version: '1.3.1',
summary: 'Two-factor authentication for accounts-password',
git: 'https://github.com/dburles/meteor-two-factor.git',
documentation: 'README.md',
});
Package.onUse(function(api) {
api.versionsFrom(['1.2.1', '2.3']);
api.use(['ecmascript', 'check']);
api.use('reactive-dict', 'client');
api.use('accounts-password', ['client', 'server']);
api.addFiles('common.js');
api.addFiles('client.js', 'client');
api.addFiles('server.js', 'server');
api.export('twoFactor');
});
Package.onTest(function(api) {
api.use('ecmascript');
api.use('tinytest');
api.use('dburles:two-factor');
api.addFiles('tests.js');
});
| Package.describe({
name: 'dburles:two-factor',
- version: '1.3.0',
? ^
+ version: '1.3.1',
? ^
summary: 'Two-factor authentication for accounts-password',
git: 'https://github.com/dburles/meteor-two-factor.git',
documentation: 'README.md',
});
Package.onUse(function(api) {
- api.versionsFrom('1.2.1');
+ api.versionsFrom(['1.2.1', '2.3']);
? + ++++++++
api.use(['ecmascript', 'check']);
api.use('reactive-dict', 'client');
api.use('accounts-password', ['client', 'server']);
api.addFiles('common.js');
api.addFiles('client.js', 'client');
api.addFiles('server.js', 'server');
api.export('twoFactor');
});
Package.onTest(function(api) {
api.use('ecmascript');
api.use('tinytest');
api.use('dburles:two-factor');
api.addFiles('tests.js');
}); | 4 | 0.16 | 2 | 2 |
5769acaa46aca4cfbcb3085fc9dd935aad86ca38 | tox.ini | tox.ini | [tox]
envlist = py{27,34}
[testenv]
basepython =
py27: python2.7
py34: python3.4
deps =
check-manifest
{py27,py34}: readme_renderer
flake8
pytest
commands =
check-manifest --ignore tox.ini,tests*
{py27,py34}: python setup.py check -m -r -s
flake8 .
python setup.py nosetests
[flake8]
exclude = .tox,*.egg,build,da
max-line-length = 160
| [tox]
envlist = py{27,34}
[testenv]
basepython =
py27: python2.7
py34: python3.4
deps =
check-manifest
{py27,py34}: readme_renderer
flake8
pytest
commands =
check-manifest --ignore tox.ini,tests*
{py27,py34}: python setup.py check -m -r -s
flake8 kafka tests
python setup.py nosetests
[flake8]
exclude = .tox,*.egg,build,da
max-line-length = 160
| Adjust flake8 command to just kafka and tests | Adjust flake8 command to just kafka and tests
| INI | apache-2.0 | toddpalino/kafka-tools | ini | ## Code Before:
[tox]
envlist = py{27,34}
[testenv]
basepython =
py27: python2.7
py34: python3.4
deps =
check-manifest
{py27,py34}: readme_renderer
flake8
pytest
commands =
check-manifest --ignore tox.ini,tests*
{py27,py34}: python setup.py check -m -r -s
flake8 .
python setup.py nosetests
[flake8]
exclude = .tox,*.egg,build,da
max-line-length = 160
## Instruction:
Adjust flake8 command to just kafka and tests
## Code After:
[tox]
envlist = py{27,34}
[testenv]
basepython =
py27: python2.7
py34: python3.4
deps =
check-manifest
{py27,py34}: readme_renderer
flake8
pytest
commands =
check-manifest --ignore tox.ini,tests*
{py27,py34}: python setup.py check -m -r -s
flake8 kafka tests
python setup.py nosetests
[flake8]
exclude = .tox,*.egg,build,da
max-line-length = 160
| [tox]
envlist = py{27,34}
[testenv]
basepython =
py27: python2.7
py34: python3.4
deps =
check-manifest
{py27,py34}: readme_renderer
flake8
pytest
commands =
check-manifest --ignore tox.ini,tests*
{py27,py34}: python setup.py check -m -r -s
- flake8 .
+ flake8 kafka tests
python setup.py nosetests
[flake8]
exclude = .tox,*.egg,build,da
max-line-length = 160 | 2 | 0.095238 | 1 | 1 |
a228a29c20e023d894fe549212513a0a04429e91 | .travis.yml | .travis.yml | language: c
script:
- ./bootstrap
- ./configure --with-gmetad --without-php --with-perl=/usr --enable-status --with-memcached
- make all
before_script:
- echo 'yes' | sudo add-apt-repository ppa:ondrej/php5
- sudo apt-get update
- sudo apt-get -o Dpkg::Options::="--force-overwrite" -y install libapr1-dev libaprutil1-dev libconfuse-dev libexpat1-dev libpcre3-dev libssl-dev librrd-dev libperl-dev libmemcached-dev libtool m4 gperf zlib1g-dev pkg-config
| language: c
script:
- ./bootstrap
- ./configure --with-gmetad --without-php --with-perl=/usr --enable-status --with-memcached
- make all
before_script:
- echo 'yes' | sudo add-apt-repository ppa:ondrej/php5
- sudo apt-get update
- sudo apt-get -o Dpkg::Options::="--force-overwrite" -y install libapr1-dev libaprutil1-dev libconfuse-dev libexpat1-dev libpcre3-dev libssl-dev librrd-dev libperl-dev libmemcached-dev libtool m4 gperf zlib1g-dev pkg-config
- wget -c http://concurrencykit.org.nyud.net/releases/ck-0.3.5.tar.gz; tar zxvf ck-0.3.5.tar.gz ; (cd ck-0.3.5/ ; ./configure && make all && sudo make install )
| Add Concurrency Kit support to compile. | Add Concurrency Kit support to compile.
| YAML | bsd-3-clause | sdgdsffdsfff/monitor-core,torkelsson/monitor-core,ganglia/monitor-core,sdgdsffdsfff/monitor-core,mjzhou/monitor-core,lawrencewu/monitor-core,ganglia/monitor-core,phreakocious/monitor-core,torkelsson/monitor-core,sdgdsffdsfff/monitor-core,NoodlesNZ/monitor-core,phreakocious/monitor-core,NoodlesNZ/monitor-core,ganglia/monitor-core,torkelsson/monitor-core,ganglia/monitor-core,phreakocious/monitor-core,NoodlesNZ/monitor-core,lawrencewu/monitor-core,torkelsson/monitor-core,torkelsson/monitor-core,fastly/monitor-core,torkelsson/monitor-core,mjzhou/monitor-core,fastly/monitor-core,mjzhou/monitor-core,torkelsson/monitor-core,mjzhou/monitor-core,lawrencewu/monitor-core,lawrencewu/monitor-core,phreakocious/monitor-core,mjzhou/monitor-core,phreakocious/monitor-core,fastly/monitor-core,ganglia/monitor-core,phreakocious/monitor-core,fastly/monitor-core,NoodlesNZ/monitor-core,sdgdsffdsfff/monitor-core,mjzhou/monitor-core,phreakocious/monitor-core,NoodlesNZ/monitor-core,fastly/monitor-core,ganglia/monitor-core,NoodlesNZ/monitor-core,lawrencewu/monitor-core,fastly/monitor-core,sdgdsffdsfff/monitor-core,ganglia/monitor-core,sdgdsffdsfff/monitor-core,fastly/monitor-core,NoodlesNZ/monitor-core,sdgdsffdsfff/monitor-core,ganglia/monitor-core,lawrencewu/monitor-core,lawrencewu/monitor-core,mjzhou/monitor-core | yaml | ## Code Before:
language: c
script:
- ./bootstrap
- ./configure --with-gmetad --without-php --with-perl=/usr --enable-status --with-memcached
- make all
before_script:
- echo 'yes' | sudo add-apt-repository ppa:ondrej/php5
- sudo apt-get update
- sudo apt-get -o Dpkg::Options::="--force-overwrite" -y install libapr1-dev libaprutil1-dev libconfuse-dev libexpat1-dev libpcre3-dev libssl-dev librrd-dev libperl-dev libmemcached-dev libtool m4 gperf zlib1g-dev pkg-config
## Instruction:
Add Concurrency Kit support to compile.
## Code After:
language: c
script:
- ./bootstrap
- ./configure --with-gmetad --without-php --with-perl=/usr --enable-status --with-memcached
- make all
before_script:
- echo 'yes' | sudo add-apt-repository ppa:ondrej/php5
- sudo apt-get update
- sudo apt-get -o Dpkg::Options::="--force-overwrite" -y install libapr1-dev libaprutil1-dev libconfuse-dev libexpat1-dev libpcre3-dev libssl-dev librrd-dev libperl-dev libmemcached-dev libtool m4 gperf zlib1g-dev pkg-config
- wget -c http://concurrencykit.org.nyud.net/releases/ck-0.3.5.tar.gz; tar zxvf ck-0.3.5.tar.gz ; (cd ck-0.3.5/ ; ./configure && make all && sudo make install )
| language: c
script:
- ./bootstrap
- ./configure --with-gmetad --without-php --with-perl=/usr --enable-status --with-memcached
- make all
before_script:
- echo 'yes' | sudo add-apt-repository ppa:ondrej/php5
- sudo apt-get update
- sudo apt-get -o Dpkg::Options::="--force-overwrite" -y install libapr1-dev libaprutil1-dev libconfuse-dev libexpat1-dev libpcre3-dev libssl-dev librrd-dev libperl-dev libmemcached-dev libtool m4 gperf zlib1g-dev pkg-config
+ - wget -c http://concurrencykit.org.nyud.net/releases/ck-0.3.5.tar.gz; tar zxvf ck-0.3.5.tar.gz ; (cd ck-0.3.5/ ; ./configure && make all && sudo make install )
+ | 2 | 0.222222 | 2 | 0 |
e89fbea5fc930d555d3e6ce0bf1a1fb7ca63d9f3 | setup.cfg | setup.cfg | [isort]
multi_line_output=3
include_trailing_comma=true
lines_after_imports=2
known_first_party=dependencies,helpers,django_project
known_third_party=django,django_filters
| [isort]
multi_line_output=3
include_trailing_comma=true
line_length=88
lines_after_imports=2
known_first_party=dependencies,helpers,django_project
known_third_party=django,django_filters
| Set the right line length for isort. | Set the right line length for isort.
| INI | bsd-2-clause | proofit404/dependencies,proofit404/dependencies,proofit404/dependencies,proofit404/dependencies | ini | ## Code Before:
[isort]
multi_line_output=3
include_trailing_comma=true
lines_after_imports=2
known_first_party=dependencies,helpers,django_project
known_third_party=django,django_filters
## Instruction:
Set the right line length for isort.
## Code After:
[isort]
multi_line_output=3
include_trailing_comma=true
line_length=88
lines_after_imports=2
known_first_party=dependencies,helpers,django_project
known_third_party=django,django_filters
| [isort]
multi_line_output=3
include_trailing_comma=true
+ line_length=88
lines_after_imports=2
known_first_party=dependencies,helpers,django_project
known_third_party=django,django_filters | 1 | 0.166667 | 1 | 0 |
bbea48e5870e74f7d0cca4ae02936cd8a9bce918 | oscar/templates/oscar/dashboard/offers/condition_form.html | oscar/templates/oscar/dashboard/offers/condition_form.html | {% extends 'dashboard/offers/step_form.html' %}
{% load i18n %}
{% block summary %}
{% include 'dashboard/offers/summary.html' %}
{% endblock %}
{% block progress %}
{% include 'dashboard/offers/progress.html' with step=3 %}
{% endblock %}
{% block form_fields %}
<span class="help-block">{{ form.non_field_errors }}</span>
<h4>{% trans "Build a new condition" %}</h4>
{% include "partials/form_field.html" with field=form.range %}
<p>{% trans "Ranges can be created and edited from within the " %}
<a href="{% url dashboard:range-list %}">{% trans "range dashboard" %}</a>.</p>
{% include "partials/form_field.html" with field=form.type %}
{% include "partials/form_field.html" with field=form.value %}
{% if form.fields.custom_condition.choices %}
<h4>{% trans "...or choose a pre-defined one" %}</h4>
{% include "partials/form_field.html" with field=form.custom_condition %}
{% endif %}
{% endblock %}
{% block submittext %}{% trans "Continue to step 4" %}{% endblock %}
| {% extends 'dashboard/offers/step_form.html' %}
{% load i18n %}
{% block summary %}
{% include 'dashboard/offers/summary.html' %}
{% endblock %}
{% block progress %}
{% include 'dashboard/offers/progress.html' with step=3 %}
{% endblock %}
{% block form_fields %}
<span class="help-block">{{ form.non_field_errors }}</span>
{% if form.fields.custom_condition.choices %}
<h4>{% trans "Build a new condition" %}</h4>
{% endif %}
{% include "partials/form_field.html" with field=form.range %}
<p>{% trans "Ranges can be created and edited from within the " %}
<a href="{% url dashboard:range-list %}">{% trans "range dashboard" %}</a>.</p>
{% include "partials/form_field.html" with field=form.type %}
{% include "partials/form_field.html" with field=form.value %}
{% if form.fields.custom_condition.choices %}
<h4>{% trans "...or choose a pre-defined one" %}</h4>
{% include "partials/form_field.html" with field=form.custom_condition %}
{% endif %}
{% endblock %}
{% block submittext %}{% trans "Continue to step 4" %}{% endblock %}
| Fix a conditional logic bug in condition form template | Fix a conditional logic bug in condition form template
Was showing a heading unnecessarily.
| HTML | bsd-3-clause | amirrpp/django-oscar,lijoantony/django-oscar,MatthewWilkes/django-oscar,marcoantoniooliveira/labweb,kapari/django-oscar,dongguangming/django-oscar,kapari/django-oscar,sasha0/django-oscar,solarissmoke/django-oscar,nickpack/django-oscar,anentropic/django-oscar,WadeYuChen/django-oscar,adamend/django-oscar,amirrpp/django-oscar,adamend/django-oscar,Idematica/django-oscar,michaelkuty/django-oscar,MatthewWilkes/django-oscar,kapt/django-oscar,bnprk/django-oscar,makielab/django-oscar,ademuk/django-oscar,Bogh/django-oscar,makielab/django-oscar,adamend/django-oscar,WillisXChen/django-oscar,michaelkuty/django-oscar,django-oscar/django-oscar,john-parton/django-oscar,django-oscar/django-oscar,jinnykoo/christmas,marcoantoniooliveira/labweb,WillisXChen/django-oscar,eddiep1101/django-oscar,saadatqadri/django-oscar,okfish/django-oscar,sonofatailor/django-oscar,monikasulik/django-oscar,bnprk/django-oscar,ahmetdaglarbas/e-commerce,nfletton/django-oscar,WillisXChen/django-oscar,vovanbo/django-oscar,rocopartners/django-oscar,jmt4/django-oscar,saadatqadri/django-oscar,pdonadeo/django-oscar,nickpack/django-oscar,manevant/django-oscar,michaelkuty/django-oscar,okfish/django-oscar,jmt4/django-oscar,jinnykoo/wuyisj,pdonadeo/django-oscar,saadatqadri/django-oscar,itbabu/django-oscar,nickpack/django-oscar,kapt/django-oscar,monikasulik/django-oscar,ademuk/django-oscar,john-parton/django-oscar,nfletton/django-oscar,bschuon/django-oscar,bnprk/django-oscar,Jannes123/django-oscar,nfletton/django-oscar,faratro/django-oscar,pasqualguerrero/django-oscar,anentropic/django-oscar,sonofatailor/django-oscar,john-parton/django-oscar,bschuon/django-oscar,jinnykoo/wuyisj,jmt4/django-oscar,django-oscar/django-oscar,taedori81/django-oscar,solarissmoke/django-oscar,sasha0/django-oscar,makielab/django-oscar,machtfit/django-oscar,jlmadurga/django-oscar,jinnykoo/wuyisj.com,itbabu/django-oscar,binarydud/django-oscar,pasqualguerrero/django-oscar,Idematica/django-oscar,john-parton/django-oscar,kap
ari/django-oscar,pdonadeo/django-oscar,dongguangming/django-oscar,binarydud/django-oscar,jlmadurga/django-oscar,pasqualguerrero/django-oscar,sasha0/django-oscar,lijoantony/django-oscar,DrOctogon/unwash_ecom,jinnykoo/wuyisj,django-oscar/django-oscar,faratro/django-oscar,jinnykoo/wuyisj.com,amirrpp/django-oscar,taedori81/django-oscar,ahmetdaglarbas/e-commerce,manevant/django-oscar,faratro/django-oscar,jinnykoo/wuyisj.com,spartonia/django-oscar,Bogh/django-oscar,solarissmoke/django-oscar,anentropic/django-oscar,WillisXChen/django-oscar,Idematica/django-oscar,WadeYuChen/django-oscar,mexeniz/django-oscar,monikasulik/django-oscar,ahmetdaglarbas/e-commerce,itbabu/django-oscar,ka7eh/django-oscar,ademuk/django-oscar,marcoantoniooliveira/labweb,WadeYuChen/django-oscar,dongguangming/django-oscar,bnprk/django-oscar,jlmadurga/django-oscar,rocopartners/django-oscar,ahmetdaglarbas/e-commerce,rocopartners/django-oscar,rocopartners/django-oscar,jlmadurga/django-oscar,vovanbo/django-oscar,bschuon/django-oscar,taedori81/django-oscar,okfish/django-oscar,kapari/django-oscar,thechampanurag/django-oscar,QLGu/django-oscar,jinnykoo/christmas,elliotthill/django-oscar,ka7eh/django-oscar,thechampanurag/django-oscar,thechampanurag/django-oscar,Jannes123/django-oscar,elliotthill/django-oscar,mexeniz/django-oscar,vovanbo/django-oscar,spartonia/django-oscar,eddiep1101/django-oscar,anentropic/django-oscar,okfish/django-oscar,michaelkuty/django-oscar,nfletton/django-oscar,jinnykoo/wuyisj,MatthewWilkes/django-oscar,dongguangming/django-oscar,lijoantony/django-oscar,jinnykoo/christmas,DrOctogon/unwash_ecom,sasha0/django-oscar,eddiep1101/django-oscar,nickpack/django-oscar,machtfit/django-oscar,bschuon/django-oscar,lijoantony/django-oscar,taedori81/django-oscar,josesanch/django-oscar,vovanbo/django-oscar,QLGu/django-oscar,spartonia/django-oscar,Bogh/django-oscar,WillisXChen/django-oscar,faratro/django-oscar,WillisXChen/django-oscar,manevant/django-oscar,amirrpp/django-oscar,jmt4/django-oscar,jinnykoo/wu
yisj.com,MatthewWilkes/django-oscar,sonofatailor/django-oscar,manevant/django-oscar,eddiep1101/django-oscar,pdonadeo/django-oscar,sonofatailor/django-oscar,adamend/django-oscar,ka7eh/django-oscar,marcoantoniooliveira/labweb,Bogh/django-oscar,spartonia/django-oscar,josesanch/django-oscar,ademuk/django-oscar,Jannes123/django-oscar,mexeniz/django-oscar,binarydud/django-oscar,pasqualguerrero/django-oscar,Jannes123/django-oscar,QLGu/django-oscar,ka7eh/django-oscar,kapt/django-oscar,monikasulik/django-oscar,DrOctogon/unwash_ecom,machtfit/django-oscar,josesanch/django-oscar,WadeYuChen/django-oscar,binarydud/django-oscar,itbabu/django-oscar,saadatqadri/django-oscar,solarissmoke/django-oscar,thechampanurag/django-oscar,mexeniz/django-oscar,makielab/django-oscar,elliotthill/django-oscar,QLGu/django-oscar | html | ## Code Before:
{% extends 'dashboard/offers/step_form.html' %}
{% load i18n %}
{% block summary %}
{% include 'dashboard/offers/summary.html' %}
{% endblock %}
{% block progress %}
{% include 'dashboard/offers/progress.html' with step=3 %}
{% endblock %}
{% block form_fields %}
<span class="help-block">{{ form.non_field_errors }}</span>
<h4>{% trans "Build a new condition" %}</h4>
{% include "partials/form_field.html" with field=form.range %}
<p>{% trans "Ranges can be created and edited from within the " %}
<a href="{% url dashboard:range-list %}">{% trans "range dashboard" %}</a>.</p>
{% include "partials/form_field.html" with field=form.type %}
{% include "partials/form_field.html" with field=form.value %}
{% if form.fields.custom_condition.choices %}
<h4>{% trans "...or choose a pre-defined one" %}</h4>
{% include "partials/form_field.html" with field=form.custom_condition %}
{% endif %}
{% endblock %}
{% block submittext %}{% trans "Continue to step 4" %}{% endblock %}
## Instruction:
Fix a conditional logic bug in condition form template
Was showing a heading unnecessarily.
## Code After:
{% extends 'dashboard/offers/step_form.html' %}
{% load i18n %}
{% block summary %}
{% include 'dashboard/offers/summary.html' %}
{% endblock %}
{% block progress %}
{% include 'dashboard/offers/progress.html' with step=3 %}
{% endblock %}
{% block form_fields %}
<span class="help-block">{{ form.non_field_errors }}</span>
{% if form.fields.custom_condition.choices %}
<h4>{% trans "Build a new condition" %}</h4>
{% endif %}
{% include "partials/form_field.html" with field=form.range %}
<p>{% trans "Ranges can be created and edited from within the " %}
<a href="{% url dashboard:range-list %}">{% trans "range dashboard" %}</a>.</p>
{% include "partials/form_field.html" with field=form.type %}
{% include "partials/form_field.html" with field=form.value %}
{% if form.fields.custom_condition.choices %}
<h4>{% trans "...or choose a pre-defined one" %}</h4>
{% include "partials/form_field.html" with field=form.custom_condition %}
{% endif %}
{% endblock %}
{% block submittext %}{% trans "Continue to step 4" %}{% endblock %}
| {% extends 'dashboard/offers/step_form.html' %}
{% load i18n %}
{% block summary %}
{% include 'dashboard/offers/summary.html' %}
{% endblock %}
{% block progress %}
{% include 'dashboard/offers/progress.html' with step=3 %}
{% endblock %}
{% block form_fields %}
<span class="help-block">{{ form.non_field_errors }}</span>
+ {% if form.fields.custom_condition.choices %}
<h4>{% trans "Build a new condition" %}</h4>
+ {% endif %}
+
{% include "partials/form_field.html" with field=form.range %}
<p>{% trans "Ranges can be created and edited from within the " %}
<a href="{% url dashboard:range-list %}">{% trans "range dashboard" %}</a>.</p>
{% include "partials/form_field.html" with field=form.type %}
{% include "partials/form_field.html" with field=form.value %}
{% if form.fields.custom_condition.choices %}
<h4>{% trans "...or choose a pre-defined one" %}</h4>
{% include "partials/form_field.html" with field=form.custom_condition %}
{% endif %}
{% endblock %}
{% block submittext %}{% trans "Continue to step 4" %}{% endblock %} | 3 | 0.107143 | 3 | 0 |
b46201250c6290d6e9c37133d891455992b27f35 | pkgs/development/compilers/gforth/default.nix | pkgs/development/compilers/gforth/default.nix | { stdenv, fetchurl, m4 }:
stdenv.mkDerivation rec {
name = "gforth-0.7.3";
src = fetchurl {
url = "http://ftp.gnu.org/gnu/gforth/gforth-0.7.3.tar.gz";
sha256 = "1c1bahc9ypmca8rv2dijiqbangm1d9av286904yw48ph7ciz4qig";
};
buildInputs = [ m4 ];
} | { stdenv, fetchurl, m4 }:
let
version = "0.7.3";
in
stdenv.mkDerivation {
name = "gforth-${version}";
src = fetchurl {
url = "http://ftp.gnu.org/gnu/gforth/gforth-${version}.tar.gz";
sha256 = "1c1bahc9ypmca8rv2dijiqbangm1d9av286904yw48ph7ciz4qig";
};
buildInputs = [ m4 ];
postInstall = ''
mkdir -p $out/share/emacs/site-lisp
cp gforth.el $out/share/emacs/site-lisp/
'';
meta = {
description = "The Forth implementation of the GNU project";
homepage = https://www.gnu.org/software/gforth/;
license = stdenv.lib.licenses.gpl3;
platforms = stdenv.lib.platforms.all;
maintainers = with stdenv.lib.maintainers; [ the-kenny ];
};
}
| Install gforth.el & add meta-data. | gforth: Install gforth.el & add meta-data.
| Nix | mit | triton/triton,NixOS/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,SymbiFlow/nixpkgs,SymbiFlow/nixpkgs,NixOS/nixpkgs,SymbiFlow/nixpkgs,SymbiFlow/nixpkgs,SymbiFlow/nixpkgs,NixOS/nixpkgs,triton/triton,SymbiFlow/nixpkgs,NixOS/nixpkgs,triton/triton,triton/triton,SymbiFlow/nixpkgs,triton/triton,NixOS/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,triton/triton,triton/triton,NixOS/nixpkgs,NixOS/nixpkgs,SymbiFlow/nixpkgs,triton/triton,NixOS/nixpkgs,SymbiFlow/nixpkgs,NixOS/nixpkgs,SymbiFlow/nixpkgs,SymbiFlow/nixpkgs,NixOS/nixpkgs,SymbiFlow/nixpkgs,SymbiFlow/nixpkgs | nix | ## Code Before:
{ stdenv, fetchurl, m4 }:
stdenv.mkDerivation rec {
name = "gforth-0.7.3";
src = fetchurl {
url = "http://ftp.gnu.org/gnu/gforth/gforth-0.7.3.tar.gz";
sha256 = "1c1bahc9ypmca8rv2dijiqbangm1d9av286904yw48ph7ciz4qig";
};
buildInputs = [ m4 ];
}
## Instruction:
gforth: Install gforth.el & add meta-data.
## Code After:
{ stdenv, fetchurl, m4 }:
let
version = "0.7.3";
in
stdenv.mkDerivation {
name = "gforth-${version}";
src = fetchurl {
url = "http://ftp.gnu.org/gnu/gforth/gforth-${version}.tar.gz";
sha256 = "1c1bahc9ypmca8rv2dijiqbangm1d9av286904yw48ph7ciz4qig";
};
buildInputs = [ m4 ];
postInstall = ''
mkdir -p $out/share/emacs/site-lisp
cp gforth.el $out/share/emacs/site-lisp/
'';
meta = {
description = "The Forth implementation of the GNU project";
homepage = https://www.gnu.org/software/gforth/;
license = stdenv.lib.licenses.gpl3;
platforms = stdenv.lib.platforms.all;
maintainers = with stdenv.lib.maintainers; [ the-kenny ];
};
}
| { stdenv, fetchurl, m4 }:
+ let
+ version = "0.7.3";
+ in
- stdenv.mkDerivation rec {
? ----
+ stdenv.mkDerivation {
- name = "gforth-0.7.3";
+ name = "gforth-${version}";
src = fetchurl {
- url = "http://ftp.gnu.org/gnu/gforth/gforth-0.7.3.tar.gz";
? ^^^^^
+ url = "http://ftp.gnu.org/gnu/gforth/gforth-${version}.tar.gz";
? ^^^^^^^^^^
sha256 = "1c1bahc9ypmca8rv2dijiqbangm1d9av286904yw48ph7ciz4qig";
};
+
buildInputs = [ m4 ];
+
+ postInstall = ''
+ mkdir -p $out/share/emacs/site-lisp
+ cp gforth.el $out/share/emacs/site-lisp/
+ '';
+
+ meta = {
+ description = "The Forth implementation of the GNU project";
+ homepage = https://www.gnu.org/software/gforth/;
+ license = stdenv.lib.licenses.gpl3;
+ platforms = stdenv.lib.platforms.all;
+ maintainers = with stdenv.lib.maintainers; [ the-kenny ];
+ };
} | 23 | 2.3 | 20 | 3 |
ec613fe1df31dd65d8a52351a29482b54ce007b3 | skvideo/__init__.py | skvideo/__init__.py | from skvideo.stuff import *
from skvideo.version import __version__
# If you want to use Numpy's testing framerwork, use the following.
# Tests go under directory tests/, benchmarks under directory benchmarks/
from numpy.testing import Tester
test = Tester().test
bench = Tester().bench
| from skvideo.version import __version__
# If you want to use Numpy's testing framerwork, use the following.
# Tests go under directory tests/, benchmarks under directory benchmarks/
from numpy.testing import Tester
test = Tester().test
bench = Tester().bench
| Remove some unused parts of skeleton | Remove some unused parts of skeleton
| Python | bsd-3-clause | aizvorski/scikit-video | python | ## Code Before:
from skvideo.stuff import *
from skvideo.version import __version__
# If you want to use Numpy's testing framerwork, use the following.
# Tests go under directory tests/, benchmarks under directory benchmarks/
from numpy.testing import Tester
test = Tester().test
bench = Tester().bench
## Instruction:
Remove some unused parts of skeleton
## Code After:
from skvideo.version import __version__
# If you want to use Numpy's testing framerwork, use the following.
# Tests go under directory tests/, benchmarks under directory benchmarks/
from numpy.testing import Tester
test = Tester().test
bench = Tester().bench
| - from skvideo.stuff import *
from skvideo.version import __version__
# If you want to use Numpy's testing framerwork, use the following.
# Tests go under directory tests/, benchmarks under directory benchmarks/
from numpy.testing import Tester
test = Tester().test
bench = Tester().bench | 1 | 0.125 | 0 | 1 |
82b71ecaf0c5b91a61e883351096ab957ae601d0 | tests/unit/LoginExceptionTest.php | tests/unit/LoginExceptionTest.php | <?php
use Common\Exception\LoginException;
class LoginExceptionTest extends \Codeception\TestCase\Test
{
use Codeception\Specify;
public function testLoginExceptionIsThrown()
{
$this->specify('', function() {
throw new LoginException('I am a test');
}, ['throws' => 'Common\Exception\LoginException']);
}
} | <?php
use Common\Exception\LoginException;
class LoginExceptionTest extends \Codeception\TestCase\Test
{
use Codeception\Specify;
public function testLoginExceptionIsThrown()
{
$this->specify('Throw LoginException', function() {
throw new LoginException('I am a test');
}, ['throws' => 'Common\Exception\LoginException']);
}
} | Add comment to test message | Add comment to test message
| PHP | mit | dcritchlow/Project1,dcritchlow/Project1 | php | ## Code Before:
<?php
use Common\Exception\LoginException;
class LoginExceptionTest extends \Codeception\TestCase\Test
{
use Codeception\Specify;
public function testLoginExceptionIsThrown()
{
$this->specify('', function() {
throw new LoginException('I am a test');
}, ['throws' => 'Common\Exception\LoginException']);
}
}
## Instruction:
Add comment to test message
## Code After:
<?php
use Common\Exception\LoginException;
class LoginExceptionTest extends \Codeception\TestCase\Test
{
use Codeception\Specify;
public function testLoginExceptionIsThrown()
{
$this->specify('Throw LoginException', function() {
throw new LoginException('I am a test');
}, ['throws' => 'Common\Exception\LoginException']);
}
} | <?php
use Common\Exception\LoginException;
class LoginExceptionTest extends \Codeception\TestCase\Test
{
use Codeception\Specify;
public function testLoginExceptionIsThrown()
{
- $this->specify('', function() {
+ $this->specify('Throw LoginException', function() {
? ++++++++++++++++++++
throw new LoginException('I am a test');
}, ['throws' => 'Common\Exception\LoginException']);
}
} | 2 | 0.133333 | 1 | 1 |
96a8c09c07c075c3f2534ab2efb2464a2d65dbd7 | gradle/wrapper/gradle-wrapper.properties | gradle/wrapper/gradle-wrapper.properties |
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionSha256Sum=6f6cfdbb12a577c3845522a1c7fbfe1295ea05d87edabedd4e23fd2bf02b88b1
distributionUrl=https\://services.gradle.org/distributions/gradle-6.0.1-all.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
|
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionSha256Sum=634f972af958e3c753aeb42d7a688fab6820b527a0aef9eed03d7f3f6f9c7c06
distributionUrl=https\://services.gradle.org/distributions/gradle-6.1-all.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
| Update Gradle: 6.0.1 -> 6.1 | Update Gradle: 6.0.1 -> 6.1
| INI | apache-2.0 | benbenw/jmeter,apache/jmeter,benbenw/jmeter,etnetera/jmeter,ham1/jmeter,apache/jmeter,ham1/jmeter,benbenw/jmeter,ham1/jmeter,ham1/jmeter,apache/jmeter,etnetera/jmeter,ham1/jmeter,apache/jmeter,etnetera/jmeter,benbenw/jmeter,etnetera/jmeter,apache/jmeter,etnetera/jmeter | ini | ## Code Before:
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionSha256Sum=6f6cfdbb12a577c3845522a1c7fbfe1295ea05d87edabedd4e23fd2bf02b88b1
distributionUrl=https\://services.gradle.org/distributions/gradle-6.0.1-all.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
## Instruction:
Update Gradle: 6.0.1 -> 6.1
## Code After:
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionSha256Sum=634f972af958e3c753aeb42d7a688fab6820b527a0aef9eed03d7f3f6f9c7c06
distributionUrl=https\://services.gradle.org/distributions/gradle-6.1-all.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
|
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
- distributionSha256Sum=6f6cfdbb12a577c3845522a1c7fbfe1295ea05d87edabedd4e23fd2bf02b88b1
+ distributionSha256Sum=634f972af958e3c753aeb42d7a688fab6820b527a0aef9eed03d7f3f6f9c7c06
- distributionUrl=https\://services.gradle.org/distributions/gradle-6.0.1-all.zip
? --
+ distributionUrl=https\://services.gradle.org/distributions/gradle-6.1-all.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists | 4 | 0.571429 | 2 | 2 |
e1ffdcc5f12be623633e2abab2041fcb574173ea | homeassistant/components/zeroconf.py | homeassistant/components/zeroconf.py | import logging
import socket
from homeassistant.const import (EVENT_HOMEASSISTANT_STOP, __version__)
REQUIREMENTS = ["zeroconf==0.17.5"]
_LOGGER = logging.getLogger(__name__)
DOMAIN = "zeroconf"
ZEROCONF_TYPE = "_home-assistant._tcp.local."
DEPENDENCIES = ["http"]
def setup(hass, config):
"""Set up Zeroconf and make Home Assistant discoverable."""
from zeroconf import Zeroconf, ServiceInfo
zeroconf = Zeroconf()
zeroconf_name = "{}.{}".format(hass.config.location_name,
ZEROCONF_TYPE)
params = {"version": __version__, "base_url": hass.http.base_url,
"needs_auth": (hass.http.api_password != "")}
info = ServiceInfo(ZEROCONF_TYPE, zeroconf_name,
socket.inet_aton(hass.http.routable_address),
hass.http.server_address[1], 0, 0, params)
zeroconf.register_service(info)
def stop_zeroconf(event):
"""Stop Zeroconf."""
zeroconf.unregister_service(info)
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, stop_zeroconf)
return True
| import logging
import socket
from homeassistant.const import (EVENT_HOMEASSISTANT_STOP, __version__)
REQUIREMENTS = ["zeroconf==0.17.5"]
DEPENDENCIES = ["api"]
_LOGGER = logging.getLogger(__name__)
DOMAIN = "zeroconf"
ZEROCONF_TYPE = "_home-assistant._tcp.local."
def setup(hass, config):
"""Set up Zeroconf and make Home Assistant discoverable."""
from zeroconf import Zeroconf, ServiceInfo
zeroconf = Zeroconf()
zeroconf_name = "{}.{}".format(hass.config.location_name,
ZEROCONF_TYPE)
params = {"version": __version__, "base_url": hass.config.api.base_url,
"needs_auth": (hass.config.api.api_password != "")}
info = ServiceInfo(ZEROCONF_TYPE, zeroconf_name,
socket.inet_aton(hass.config.api.host),
hass.config.api.port, 0, 0, params)
zeroconf.register_service(info)
def stop_zeroconf(event):
"""Stop Zeroconf."""
zeroconf.unregister_service(info)
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, stop_zeroconf)
return True
| Use hass.config.api instead of hass.http | Use hass.config.api instead of hass.http
| Python | mit | miniconfig/home-assistant,Julian/home-assistant,toddeye/home-assistant,ct-23/home-assistant,deisi/home-assistant,tchellomello/home-assistant,rohitranjan1991/home-assistant,Julian/home-assistant,Duoxilian/home-assistant,betrisey/home-assistant,keerts/home-assistant,ct-23/home-assistant,tboyce021/home-assistant,kyvinh/home-assistant,leoc/home-assistant,jawilson/home-assistant,JshWright/home-assistant,turbokongen/home-assistant,varunr047/homefile,open-homeautomation/home-assistant,betrisey/home-assistant,GenericStudent/home-assistant,alexmogavero/home-assistant,fbradyirl/home-assistant,eagleamon/home-assistant,kyvinh/home-assistant,toddeye/home-assistant,auduny/home-assistant,aronsky/home-assistant,hmronline/home-assistant,bdfoster/blumate,jaharkes/home-assistant,fbradyirl/home-assistant,srcLurker/home-assistant,morphis/home-assistant,deisi/home-assistant,LinuxChristian/home-assistant,hexxter/home-assistant,sdague/home-assistant,LinuxChristian/home-assistant,shaftoe/home-assistant,stefan-jonasson/home-assistant,adrienbrault/home-assistant,w1ll1am23/home-assistant,tinloaf/home-assistant,soldag/home-assistant,molobrakos/home-assistant,Zyell/home-assistant,HydrelioxGitHub/home-assistant,keerts/home-assistant,molobrakos/home-assistant,varunr047/homefile,jaharkes/home-assistant,xifle/home-assistant,Smart-Torvy/torvy-home-assistant,stefan-jonasson/home-assistant,tboyce1/home-assistant,kennedyshead/home-assistant,sffjunkie/home-assistant,kennedyshead/home-assistant,jnewland/home-assistant,tinloaf/home-assistant,dmeulen/home-assistant,xifle/home-assistant,mikaelboman/home-assistant,Zac-HD/home-assistant,hmronline/home-assistant,partofthething/home-assistant,robjohnson189/home-assistant,lukas-hetzenecker/home-assistant,nugget/home-assistant,aequitas/home-assistant,oandrew/home-assistant,morphis/home-assistant,eagleamon/home-assistant,HydrelioxGitHub/home-assistant,Teagan42/home-assistant,ct-23/home-assistant,Zyell/home-assistant,sffjunkie/home-assistant,tinloaf
/home-assistant,philipbl/home-assistant,kyvinh/home-assistant,Julian/home-assistant,PetePriority/home-assistant,sffjunkie/home-assistant,bdfoster/blumate,Teagan42/home-assistant,rohitranjan1991/home-assistant,deisi/home-assistant,miniconfig/home-assistant,robjohnson189/home-assistant,happyleavesaoc/home-assistant,morphis/home-assistant,GenericStudent/home-assistant,soldag/home-assistant,deisi/home-assistant,Duoxilian/home-assistant,persandstrom/home-assistant,w1ll1am23/home-assistant,oandrew/home-assistant,mezz64/home-assistant,DavidLP/home-assistant,morphis/home-assistant,tboyce1/home-assistant,florianholzapfel/home-assistant,Danielhiversen/home-assistant,home-assistant/home-assistant,florianholzapfel/home-assistant,srcLurker/home-assistant,stefan-jonasson/home-assistant,Smart-Torvy/torvy-home-assistant,sdague/home-assistant,kyvinh/home-assistant,balloob/home-assistant,happyleavesaoc/home-assistant,MartinHjelmare/home-assistant,happyleavesaoc/home-assistant,adrienbrault/home-assistant,ewandor/home-assistant,nugget/home-assistant,xifle/home-assistant,MungoRae/home-assistant,robbiet480/home-assistant,MartinHjelmare/home-assistant,alexmogavero/home-assistant,leppa/home-assistant,open-homeautomation/home-assistant,leoc/home-assistant,LinuxChristian/home-assistant,leoc/home-assistant,MartinHjelmare/home-assistant,Julian/home-assistant,miniconfig/home-assistant,lukas-hetzenecker/home-assistant,mikaelboman/home-assistant,joopert/home-assistant,Zac-HD/home-assistant,auduny/home-assistant,Zac-HD/home-assistant,joopert/home-assistant,ma314smith/home-assistant,Zyell/home-assistant,devdelay/home-assistant,srcLurker/home-assistant,hexxter/home-assistant,HydrelioxGitHub/home-assistant,mikaelboman/home-assistant,aequitas/home-assistant,qedi-r/home-assistant,nkgilley/home-assistant,jnewland/home-assistant,deisi/home-assistant,leoc/home-assistant,jaharkes/home-assistant,varunr047/homefile,nugget/home-assistant,hexxter/home-assistant,sander76/home-assistant,fbradyirl/home-assistant,
partofthething/home-assistant,jamespcole/home-assistant,persandstrom/home-assistant,jnewland/home-assistant,balloob/home-assistant,leppa/home-assistant,bdfoster/blumate,keerts/home-assistant,shaftoe/home-assistant,pschmitt/home-assistant,philipbl/home-assistant,tboyce1/home-assistant,betrisey/home-assistant,hmronline/home-assistant,sander76/home-assistant,xifle/home-assistant,JshWright/home-assistant,oandrew/home-assistant,PetePriority/home-assistant,varunr047/homefile,dmeulen/home-assistant,tboyce021/home-assistant,alexmogavero/home-assistant,DavidLP/home-assistant,persandstrom/home-assistant,LinuxChristian/home-assistant,philipbl/home-assistant,JshWright/home-assistant,FreekingDean/home-assistant,robbiet480/home-assistant,jabesq/home-assistant,ct-23/home-assistant,hmronline/home-assistant,ma314smith/home-assistant,Smart-Torvy/torvy-home-assistant,JshWright/home-assistant,home-assistant/home-assistant,shaftoe/home-assistant,devdelay/home-assistant,emilhetty/home-assistant,emilhetty/home-assistant,Smart-Torvy/torvy-home-assistant,mKeRix/home-assistant,mKeRix/home-assistant,Danielhiversen/home-assistant,stefan-jonasson/home-assistant,bdfoster/blumate,sffjunkie/home-assistant,bdfoster/blumate,jawilson/home-assistant,florianholzapfel/home-assistant,MungoRae/home-assistant,jaharkes/home-assistant,Cinntax/home-assistant,aronsky/home-assistant,auduny/home-assistant,robjohnson189/home-assistant,florianholzapfel/home-assistant,emilhetty/home-assistant,devdelay/home-assistant,sffjunkie/home-assistant,MungoRae/home-assistant,mikaelboman/home-assistant,Duoxilian/home-assistant,Duoxilian/home-assistant,dmeulen/home-assistant,betrisey/home-assistant,ma314smith/home-assistant,open-homeautomation/home-assistant,DavidLP/home-assistant,robjohnson189/home-assistant,dmeulen/home-assistant,mikaelboman/home-assistant,jamespcole/home-assistant,philipbl/home-assistant,jabesq/home-assistant,emilhetty/home-assistant,oandrew/home-assistant,rohitranjan1991/home-assistant,MungoRae/home-assista
nt,mKeRix/home-assistant,shaftoe/home-assistant,titilambert/home-assistant,MungoRae/home-assistant,varunr047/homefile,balloob/home-assistant,molobrakos/home-assistant,postlund/home-assistant,happyleavesaoc/home-assistant,mezz64/home-assistant,Zac-HD/home-assistant,ct-23/home-assistant,open-homeautomation/home-assistant,ma314smith/home-assistant,FreekingDean/home-assistant,mKeRix/home-assistant,qedi-r/home-assistant,miniconfig/home-assistant,ewandor/home-assistant,srcLurker/home-assistant,ewandor/home-assistant,PetePriority/home-assistant,postlund/home-assistant,jabesq/home-assistant,alexmogavero/home-assistant,aequitas/home-assistant,devdelay/home-assistant,hexxter/home-assistant,Cinntax/home-assistant,titilambert/home-assistant,emilhetty/home-assistant,tchellomello/home-assistant,keerts/home-assistant,LinuxChristian/home-assistant,eagleamon/home-assistant,nkgilley/home-assistant,hmronline/home-assistant,jamespcole/home-assistant,pschmitt/home-assistant,tboyce1/home-assistant,eagleamon/home-assistant,turbokongen/home-assistant | python | ## Code Before:
import logging
import socket
from homeassistant.const import (EVENT_HOMEASSISTANT_STOP, __version__)
REQUIREMENTS = ["zeroconf==0.17.5"]
_LOGGER = logging.getLogger(__name__)
DOMAIN = "zeroconf"
ZEROCONF_TYPE = "_home-assistant._tcp.local."
DEPENDENCIES = ["http"]
def setup(hass, config):
"""Set up Zeroconf and make Home Assistant discoverable."""
from zeroconf import Zeroconf, ServiceInfo
zeroconf = Zeroconf()
zeroconf_name = "{}.{}".format(hass.config.location_name,
ZEROCONF_TYPE)
params = {"version": __version__, "base_url": hass.http.base_url,
"needs_auth": (hass.http.api_password != "")}
info = ServiceInfo(ZEROCONF_TYPE, zeroconf_name,
socket.inet_aton(hass.http.routable_address),
hass.http.server_address[1], 0, 0, params)
zeroconf.register_service(info)
def stop_zeroconf(event):
"""Stop Zeroconf."""
zeroconf.unregister_service(info)
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, stop_zeroconf)
return True
## Instruction:
Use hass.config.api instead of hass.http
## Code After:
import logging
import socket
from homeassistant.const import (EVENT_HOMEASSISTANT_STOP, __version__)
REQUIREMENTS = ["zeroconf==0.17.5"]
DEPENDENCIES = ["api"]
_LOGGER = logging.getLogger(__name__)
DOMAIN = "zeroconf"
ZEROCONF_TYPE = "_home-assistant._tcp.local."
def setup(hass, config):
"""Set up Zeroconf and make Home Assistant discoverable."""
from zeroconf import Zeroconf, ServiceInfo
zeroconf = Zeroconf()
zeroconf_name = "{}.{}".format(hass.config.location_name,
ZEROCONF_TYPE)
params = {"version": __version__, "base_url": hass.config.api.base_url,
"needs_auth": (hass.config.api.api_password != "")}
info = ServiceInfo(ZEROCONF_TYPE, zeroconf_name,
socket.inet_aton(hass.config.api.host),
hass.config.api.port, 0, 0, params)
zeroconf.register_service(info)
def stop_zeroconf(event):
"""Stop Zeroconf."""
zeroconf.unregister_service(info)
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, stop_zeroconf)
return True
| import logging
import socket
from homeassistant.const import (EVENT_HOMEASSISTANT_STOP, __version__)
REQUIREMENTS = ["zeroconf==0.17.5"]
+ DEPENDENCIES = ["api"]
+
_LOGGER = logging.getLogger(__name__)
DOMAIN = "zeroconf"
ZEROCONF_TYPE = "_home-assistant._tcp.local."
-
- DEPENDENCIES = ["http"]
def setup(hass, config):
"""Set up Zeroconf and make Home Assistant discoverable."""
from zeroconf import Zeroconf, ServiceInfo
zeroconf = Zeroconf()
zeroconf_name = "{}.{}".format(hass.config.location_name,
ZEROCONF_TYPE)
- params = {"version": __version__, "base_url": hass.http.base_url,
? ^^^
+ params = {"version": __version__, "base_url": hass.config.api.base_url,
? ^^^^^^^^ +
- "needs_auth": (hass.http.api_password != "")}
? ^^^
+ "needs_auth": (hass.config.api.api_password != "")}
? ^^^^^^^^ +
info = ServiceInfo(ZEROCONF_TYPE, zeroconf_name,
- socket.inet_aton(hass.http.routable_address),
? -------------------
+ socket.inet_aton(hass.config.api.host),
? +++++++++++ ++
- hass.http.server_address[1], 0, 0, params)
+ hass.config.api.port, 0, 0, params)
zeroconf.register_service(info)
def stop_zeroconf(event):
"""Stop Zeroconf."""
zeroconf.unregister_service(info)
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, stop_zeroconf)
return True | 12 | 0.292683 | 6 | 6 |
51803fc354cdc64cbb3e7dcedf06c28e0189f11c | jest.config.js | jest.config.js | export default {
testEnvironment: 'jsdom',
// Display coverage summary below file-by-file coverage breakdown
coverageReporters: ['clover', 'json', 'lcov', 'html', 'text', 'text-summary'],
transform: {
'\\.js$': ['rollup-jest'],
'\\.pegjs$': ['pegjs-jest-transformer']
}
};
| export default {
testEnvironment: 'jsdom',
// Display coverage summary below file-by-file coverage breakdown
coverageReporters: ['clover', 'json', 'lcov', 'html', 'text', 'text-summary'],
transform: {
'\\.js$': ['rollup-jest'],
'\\.pegjs$': ['pegjs-jest-transformer']
},
coveragePathIgnorePatterns: ['\\.pegjs']
};
| Exclude PegJS grammar from code coverage | Exclude PegJS grammar from code coverage
| JavaScript | mit | caleb531/truthy,caleb531/truthy | javascript | ## Code Before:
export default {
testEnvironment: 'jsdom',
// Display coverage summary below file-by-file coverage breakdown
coverageReporters: ['clover', 'json', 'lcov', 'html', 'text', 'text-summary'],
transform: {
'\\.js$': ['rollup-jest'],
'\\.pegjs$': ['pegjs-jest-transformer']
}
};
## Instruction:
Exclude PegJS grammar from code coverage
## Code After:
export default {
testEnvironment: 'jsdom',
// Display coverage summary below file-by-file coverage breakdown
coverageReporters: ['clover', 'json', 'lcov', 'html', 'text', 'text-summary'],
transform: {
'\\.js$': ['rollup-jest'],
'\\.pegjs$': ['pegjs-jest-transformer']
},
coveragePathIgnorePatterns: ['\\.pegjs']
};
| export default {
testEnvironment: 'jsdom',
// Display coverage summary below file-by-file coverage breakdown
coverageReporters: ['clover', 'json', 'lcov', 'html', 'text', 'text-summary'],
transform: {
'\\.js$': ['rollup-jest'],
'\\.pegjs$': ['pegjs-jest-transformer']
- }
+ },
? +
+ coveragePathIgnorePatterns: ['\\.pegjs']
}; | 3 | 0.333333 | 2 | 1 |
7e48ab7939edc5fb4b3b2d20d91a954b0fb02d40 | config/plugins/syntastic.vim | config/plugins/syntastic.vim | let g:syntastic_ruby_checkers = ['rubocop']
let g:syntastic_ruby_rubocop_args = '--auto-correct'
let g:syntastic_javascript_checkers = ['eslint']
let g:syntastic_javascript_eslint_args = '--fix'
set statusline+=%#warningmsg#
set statusline+=%{SyntasticStatuslineFlag()}
set statusline+=%*
let g:syntastic_always_populate_loc_list = 1
let g:syntastic_auto_loc_list = 0
let g:syntastic_check_on_open = 1
let g:syntastic_check_on_wq = 1
let g:syntastic_warning_symbol = '⚠️'
let g:syntastic_error_symbol = '💩'
| let g:syntastic_ruby_checkers = ['rubocop']
let g:syntastic_ruby_rubocop_args = '--auto-correct'
let g:syntastic_javascript_checkers = ['eslint']
let g:syntastic_javascript_eslint_args = '--fix'
let g:syntastic_always_populate_loc_list = 1
let g:syntastic_auto_loc_list = 0
let g:syntastic_check_on_open = 1
let g:syntastic_check_on_wq = 1
let g:syntastic_warning_symbol = '⚠️'
let g:syntastic_error_symbol = '💩'
| Remove Syntastic statusline addition that caused blank statusline | Remove Syntastic statusline addition that caused blank statusline
| VimL | mit | Casecommons/vim-config | viml | ## Code Before:
let g:syntastic_ruby_checkers = ['rubocop']
let g:syntastic_ruby_rubocop_args = '--auto-correct'
let g:syntastic_javascript_checkers = ['eslint']
let g:syntastic_javascript_eslint_args = '--fix'
set statusline+=%#warningmsg#
set statusline+=%{SyntasticStatuslineFlag()}
set statusline+=%*
let g:syntastic_always_populate_loc_list = 1
let g:syntastic_auto_loc_list = 0
let g:syntastic_check_on_open = 1
let g:syntastic_check_on_wq = 1
let g:syntastic_warning_symbol = '⚠️'
let g:syntastic_error_symbol = '💩'
## Instruction:
Remove Syntastic statusline addition that caused blank statusline
## Code After:
let g:syntastic_ruby_checkers = ['rubocop']
let g:syntastic_ruby_rubocop_args = '--auto-correct'
let g:syntastic_javascript_checkers = ['eslint']
let g:syntastic_javascript_eslint_args = '--fix'
let g:syntastic_always_populate_loc_list = 1
let g:syntastic_auto_loc_list = 0
let g:syntastic_check_on_open = 1
let g:syntastic_check_on_wq = 1
let g:syntastic_warning_symbol = '⚠️'
let g:syntastic_error_symbol = '💩'
| let g:syntastic_ruby_checkers = ['rubocop']
let g:syntastic_ruby_rubocop_args = '--auto-correct'
let g:syntastic_javascript_checkers = ['eslint']
let g:syntastic_javascript_eslint_args = '--fix'
-
- set statusline+=%#warningmsg#
- set statusline+=%{SyntasticStatuslineFlag()}
- set statusline+=%*
let g:syntastic_always_populate_loc_list = 1
let g:syntastic_auto_loc_list = 0
let g:syntastic_check_on_open = 1
let g:syntastic_check_on_wq = 1
let g:syntastic_warning_symbol = '⚠️'
let g:syntastic_error_symbol = '💩' | 4 | 0.235294 | 0 | 4 |
3dfe4781447560cec3ee1c9ea76d556f062e6adc | web/setup.php | web/setup.php | <?php
require('../vendor/autoload.php');
$max_data_length = 20010;
// ^ Calculated like so: len("789cae03b53fe955bc6746e7ee0663" + (":o331HlIeYrw:999:999"*999))
if (!array_key_exists('REDIS_URL', $_ENV))
$_ENV['REDIS_URL'] = "redis://h:@localhost:6379";
$redis = new Predis\Client([
'host' => parse_url($_ENV['REDIS_URL'], PHP_URL_HOST),
'port' => parse_url($_ENV['REDIS_URL'], PHP_URL_PORT),
'password' => parse_url($_ENV['REDIS_URL'], PHP_URL_PASS),
]);
| <?php
require('../vendor/autoload.php');
$max_data_length = 20010;
// ^ Calculated like so: len("789cae03b53fe955bc6746e7ee0663" + (":o331HlIeYrw:999:999"*999))
$redis_url = $_ENV['REDIS_URL'];
if (!isset($redis_url))
$redis_url = "redis://h:@localhost:6379";
$redis = new Predis\Client([
'host' => parse_url($redis_url, PHP_URL_HOST),
'port' => parse_url($redis_url, PHP_URL_PORT),
'password' => parse_url($redis_url, PHP_URL_PASS),
]);
| Fix redis environment variable overwriting | Fix redis environment variable overwriting
On production environments, this command seems to fail
| PHP | mit | andychase/radmontage,andychase/radmontage,andychase/radmontage | php | ## Code Before:
<?php
require('../vendor/autoload.php');
$max_data_length = 20010;
// ^ Calculated like so: len("789cae03b53fe955bc6746e7ee0663" + (":o331HlIeYrw:999:999"*999))
if (!array_key_exists('REDIS_URL', $_ENV))
$_ENV['REDIS_URL'] = "redis://h:@localhost:6379";
$redis = new Predis\Client([
'host' => parse_url($_ENV['REDIS_URL'], PHP_URL_HOST),
'port' => parse_url($_ENV['REDIS_URL'], PHP_URL_PORT),
'password' => parse_url($_ENV['REDIS_URL'], PHP_URL_PASS),
]);
## Instruction:
Fix redis environment variable overwriting
On production environments, this command seems to fail
## Code After:
<?php
require('../vendor/autoload.php');
$max_data_length = 20010;
// ^ Calculated like so: len("789cae03b53fe955bc6746e7ee0663" + (":o331HlIeYrw:999:999"*999))
$redis_url = $_ENV['REDIS_URL'];
if (!isset($redis_url))
$redis_url = "redis://h:@localhost:6379";
$redis = new Predis\Client([
'host' => parse_url($redis_url, PHP_URL_HOST),
'port' => parse_url($redis_url, PHP_URL_PORT),
'password' => parse_url($redis_url, PHP_URL_PASS),
]);
| <?php
require('../vendor/autoload.php');
$max_data_length = 20010;
// ^ Calculated like so: len("789cae03b53fe955bc6746e7ee0663" + (":o331HlIeYrw:999:999"*999))
- if (!array_key_exists('REDIS_URL', $_ENV))
+ $redis_url = $_ENV['REDIS_URL'];
+ if (!isset($redis_url))
- $_ENV['REDIS_URL'] = "redis://h:@localhost:6379";
? ^^^^^^^^^^^^^^^^
+ $redis_url = "redis://h:@localhost:6379";
? +++++ ^^^
$redis = new Predis\Client([
- 'host' => parse_url($_ENV['REDIS_URL'], PHP_URL_HOST),
? ^^^^^^^^^^^^^^^^
+ 'host' => parse_url($redis_url, PHP_URL_HOST),
? +++++ ^^^
- 'port' => parse_url($_ENV['REDIS_URL'], PHP_URL_PORT),
? ^^^^^^^^^^^^^^^^
+ 'port' => parse_url($redis_url, PHP_URL_PORT),
? +++++ ^^^
- 'password' => parse_url($_ENV['REDIS_URL'], PHP_URL_PASS),
? ^^^^^^^^^^^^^^^^
+ 'password' => parse_url($redis_url, PHP_URL_PASS),
? +++++ ^^^
]); | 11 | 0.733333 | 6 | 5 |
9f527681afbcd19610227fa981df1132ea143c8d | vendor/assets/stylesheets/editor/themes/editor/epic-light.css | vendor/assets/stylesheets/editor/themes/editor/epic-light.css | html { padding:10px; }
body {
border:0;
background:#fcfcfc;
font-family:monospace;
font-size:14px;
padding:10px;
line-height:1.35em;
margin:0;
padding:0;
}
| html { padding: 0px; }
body {
border: 2px solid #dce4ec;
background:#ECF0F1;
color: #222;
font-family:monospace;
font-size:14px;
padding:10px;
line-height:1.35em;
margin:0;
-webkit-border-radius: 6px;
border-radius: 6px;
}
| Update editor theme to work better with new page background color | Update editor theme to work better with new page background color
| CSS | mit | joshmcarthur/inquest,joshmcarthur/inquest | css | ## Code Before:
html { padding:10px; }
body {
border:0;
background:#fcfcfc;
font-family:monospace;
font-size:14px;
padding:10px;
line-height:1.35em;
margin:0;
padding:0;
}
## Instruction:
Update editor theme to work better with new page background color
## Code After:
html { padding: 0px; }
body {
border: 2px solid #dce4ec;
background:#ECF0F1;
color: #222;
font-family:monospace;
font-size:14px;
padding:10px;
line-height:1.35em;
margin:0;
-webkit-border-radius: 6px;
border-radius: 6px;
}
| - html { padding:10px; }
? ^
+ html { padding: 0px; }
? ^
body {
- border:0;
- background:#fcfcfc;
+ border: 2px solid #dce4ec;
+ background:#ECF0F1;
+ color: #222;
font-family:monospace;
font-size:14px;
padding:10px;
line-height:1.35em;
margin:0;
- padding:0;
+ -webkit-border-radius: 6px;
+ border-radius: 6px;
}
+ | 11 | 0.916667 | 7 | 4 |
e21260b57873ed70bd6b1690b62a754af58020fc | otp_twilio/migrations/0002_last_t.py | otp_twilio/migrations/0002_last_t.py | from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('otp_twilio_encrypted', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='twiliosmsdevice',
name='last_t',
field=models.BigIntegerField(default=-1, help_text='The t value of the latest verified token. The next token must be at a higher time step.'),
preserve_default=True,
),
]
| from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('otp_twilio', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='twiliosmsdevice',
name='last_t',
field=models.BigIntegerField(default=-1, help_text='The t value of the latest verified token. The next token must be at a higher time step.'),
preserve_default=True,
),
]
| Undo dependency name change in last migration | Undo dependency name change in last migration | Python | bsd-2-clause | prototypsthlm/otp_twilio_encrypted,gustavrannestig/otp_twilio_encrypted | python | ## Code Before:
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('otp_twilio_encrypted', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='twiliosmsdevice',
name='last_t',
field=models.BigIntegerField(default=-1, help_text='The t value of the latest verified token. The next token must be at a higher time step.'),
preserve_default=True,
),
]
## Instruction:
Undo dependency name change in last migration
## Code After:
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('otp_twilio', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='twiliosmsdevice',
name='last_t',
field=models.BigIntegerField(default=-1, help_text='The t value of the latest verified token. The next token must be at a higher time step.'),
preserve_default=True,
),
]
| from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
- ('otp_twilio_encrypted', '0001_initial'),
? ----------
+ ('otp_twilio', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='twiliosmsdevice',
name='last_t',
field=models.BigIntegerField(default=-1, help_text='The t value of the latest verified token. The next token must be at a higher time step.'),
preserve_default=True,
),
] | 2 | 0.105263 | 1 | 1 |
fabb06464a6c7cdd5900ac7883e577460a33b6bd | .fixtures.yml | .fixtures.yml | fixtures:
repositories:
"stdlib": "git://github.com/puppetlabs/puppetlabs-stdlib"
symlinks:
"mysql": "#{source_dir}"
| fixtures:
repositories:
"stdlib": "git://github.com/puppetlabs/puppetlabs-stdlib"
"create_resources": "git://github.com/puppetlabs/puppetlabs-create_resources.git"
symlinks:
"mysql": "#{source_dir}"
| Add create_resources to fix Travis CI failure on Puppet 2.6 | Add create_resources to fix Travis CI failure on Puppet 2.6
| YAML | apache-2.0 | 16nsk/puppetlabs-mysql,michaeltchapman/puppet-mysql,ndelic0/puppetlabs-mysql,justinstoller/puppetlabs-mysql,rdo-puppet-modules/puppetlabs-mysql,bhagyas/puppetlabs-mysql,carroarmato0/puppetlabs-mysql,edestecd/puppetlabs-mysql,michaeltchapman/puppet-mysql,haw-hh-ai-lab/puppetlabs-mysql,mhaskel/puppetlabs-mysql,redhat-cip/puppetlabs-mysql,gabriel403/puppetlabs-mysql,gibbsoft/puppetlabs-mysql,chris-mac/puppetmysql,igalic/puppetlabs-mysql,underscorgan/puppetlabs-mysql,tphoney/puppetlabs-mysql,chris-mac/puppetmysql,fuel-infra/puppetlabs-mysql,danzilio/puppetlabs-mysql,op-ct/puppetlabs-mysql,skyscrapers/puppetlabs-mysql,puppetlabs/puppetlabs-mysql,roman-mueller/puppetlabs-mysql,underscorgan/puppetlabs-mysql,xbezdick/puppetlabs-mysql,redhat-cip/puppetlabs-mysql,bhagyas/puppetlabs-mysql,puppetlabs/puppetlabs-mysql,synalabs/puppetlabs-mysql,danjung/puppetlabs-mysql,trovitsys/puppetlabs-mysql,hubspotdevops/puppetlabs-mysql,16nsk/puppetlabs-mysql,mcanevet/puppetlabs-mysql,simp/puppetlabs-mysql,hundredacres/puppetlabs-mysql,nickchappell/puppetlabs-mysql,jbondpdx/puppetlabs-mysql,hunner/puppetlabs-mysql,j-russell/puppetlabs-mysql,gwdg/puppetlabs-mysql,op-ct/puppetlabs-mysql,mhaskel/puppetlabs-mysql,jonnytpuppet/puppetlabs-mysql,gabriel403/puppetlabs-mysql,plumgrid/puppetlabs-mysql,apa-it/puppetlabs-mysql,rdo-puppet-modules/puppetlabs-mysql,synalabs/puppetlabs-mysql,ndelic0/puppetlabs-mysql,BioIQ/puppetlabs-mysql,danjung/puppetlabs-mysql,cdenneen/puppetlabs-mysql,hubspotdevops/puppetlabs-mysql,plumgrid/puppetlabs-mysql,danzilio/puppetlabs-mysql,BioIQ/puppetlabs-mysql,roidelapluie/puppetlabs-mysql,roman-mueller/puppetlabs-mysql,BioIQ/puppetlabs-mysql,bmjen/puppetlabs-mysql,j-russell/puppetlabs-mysql,carroarmato0/puppetlabs-mysql,Adaptavist/puppet-mysql,nickchappell/puppetlabs-mysql,danjung/puppetlabs-mysql,seattle-biomed/puppet-mysql,skyscrapers/puppetlabs-mysql,bmjen/puppetlabs-mysql,DavidS/puppetlabs-mysql,markasammut/puppetlabs-mysql,trovitsys/puppetlabs-my
sql,fuel-infra/puppetlabs-mysql,haw-hh-ai-lab/puppetlabs-mysql,jbondpdx/puppetlabs-mysql,breuninger-ecom/puppetlabs-mysql,cyberious/puppetlabs-mysql,BioIQ/puppetlabs-mysql,gibbsoft/puppetlabs-mysql,Adaptavist/puppet-mysql,justinstoller/puppetlabs-mysql,squarit/puppetlabs-mysql,simp/puppetlabs-mysql,xbezdick/puppetlabs-mysql,breuninger-ecom/puppetlabs-mysql,danjung/puppetlabs-mysql,hunner/puppetlabs-mysql,seattle-biomed/puppet-mysql,tphoney/puppetlabs-mysql,jonnytpuppet/puppetlabs-mysql,squarit/puppetlabs-mysql,trovitsys/puppetlabs-mysql,gwdg/puppetlabs-mysql,DavidS/puppetlabs-mysql,hundredacres/puppetlabs-mysql,cyberious/puppetlabs-mysql,apa-it/puppetlabs-mysql,cdenneen/puppetlabs-mysql,igalic/puppetlabs-mysql,markasammut/puppetlabs-mysql,edestecd/puppetlabs-mysql,mcanevet/puppetlabs-mysql,roidelapluie/puppetlabs-mysql,breuninger-ecom/puppetlabs-mysql | yaml | ## Code Before:
fixtures:
repositories:
"stdlib": "git://github.com/puppetlabs/puppetlabs-stdlib"
symlinks:
"mysql": "#{source_dir}"
## Instruction:
Add create_resources to fix Travis CI failure on Puppet 2.6
## Code After:
fixtures:
repositories:
"stdlib": "git://github.com/puppetlabs/puppetlabs-stdlib"
"create_resources": "git://github.com/puppetlabs/puppetlabs-create_resources.git"
symlinks:
"mysql": "#{source_dir}"
| fixtures:
repositories:
"stdlib": "git://github.com/puppetlabs/puppetlabs-stdlib"
+ "create_resources": "git://github.com/puppetlabs/puppetlabs-create_resources.git"
symlinks:
"mysql": "#{source_dir}" | 1 | 0.2 | 1 | 0 |
c6d0cfe468166363450c61e479d6443bd3986d13 | src/test/resources/CompileTwice.java | src/test/resources/CompileTwice.java | public class CompileTwice {
public String message() {
return "Hi again!";
}
} | import java.util.Collections;
import java.util.List;
public class CompileTwice {
public List<String> message() {
return Collections.singletonList("Hi again!");
}
} | Verify imports work when compiling incrementally | Verify imports work when compiling incrementally
| Java | mit | georgewfraser/vscode-javac,georgewfraser/vscode-javac,georgewfraser/vscode-javac | java | ## Code Before:
public class CompileTwice {
public String message() {
return "Hi again!";
}
}
## Instruction:
Verify imports work when compiling incrementally
## Code After:
import java.util.Collections;
import java.util.List;
public class CompileTwice {
public List<String> message() {
return Collections.singletonList("Hi again!");
}
} | + import java.util.Collections;
+ import java.util.List;
+
public class CompileTwice {
- public String message() {
+ public List<String> message() {
? +++++ +
- return "Hi again!";
+ return Collections.singletonList("Hi again!");
}
} | 7 | 1.4 | 5 | 2 |
93d81115410caf4f4fab76c3cdf7d929862e2afb | app/www/states/doenca-list/doenca-list.html | app/www/states/doenca-list/doenca-list.html | <ion-view view-title="Doenças">
<ion-nav-buttons side="right">
<button class="button button-icon icon ion-ios-search-strong" ng-click="showFilterBar()"></button>
<button class="button button-icon icon"
ng-class="{'ion-android-add': platform.isAndroid(), 'ion-ios-plus-empty': !platform.isAndroid()}"
ng-click="new()"></button>
</ion-nav-buttons>
<ion-content>
<ion-refresher pulling-text="Pull to refresh" on-refresh="refresh(true)"></ion-refresher>
<ion-list>
<ion-item
class="item-remove-animate item-icon-right"
auto-list-divider
auto-list-divider-value='{{doenca.data | date:"dd/MM/yyyy"}}'
auto-list-divider-function="dividerFunction"
ng-repeat="doenca in doencas">
<span class="data">{{doenca.data | date:"h:mm"}}</span>
{{doenca.nome}}
<i class="icon ion-chevron-right icon-accessory"></i>
<ion-option-button class="button-energized" ng-click="edit(doenca)">
Edit
</ion-option-button>
<ion-option-button class="button-assertive" ng-click="remove(doenca)">
Delete
</ion-option-button>
</ion-item>
</ion-list>
</ion-content>
</ion-view>
| <ion-view view-title="Doenças">
<ion-nav-buttons side="right">
<button class="button button-icon icon ion-ios-search-strong" ng-click="showFilterBar()"></button>
<button class="button button-icon icon"
ng-class="{'ion-android-add': platform.isAndroid(), 'ion-ios-plus-empty': !platform.isAndroid()}"
ng-click="new()"></button>
</ion-nav-buttons>
<ion-content>
<ion-refresher pulling-text="Pull to refresh" on-refresh="refresh(true)" spinner="dots"></ion-refresher>
<ion-list>
<ion-item
class="item-remove-animate item-icon-right"
auto-list-divider
auto-list-divider-value='{{doenca.data | date:"dd/MM/yyyy"}}'
auto-list-divider-function="dividerFunction"
ng-repeat="doenca in doencas">
<span class="data">{{doenca.data | date:"h:mm"}}</span>
{{doenca.nome}}
<i class="icon ion-chevron-right icon-accessory"></i>
<ion-option-button class="button-energized" ng-click="edit(doenca)">
Edit
</ion-option-button>
<ion-option-button class="button-assertive" ng-click="remove(doenca)">
Delete
</ion-option-button>
</ion-item>
</ion-list>
</ion-content>
</ion-view>
| Change refresher spinner style to dots | Change refresher spinner style to dots
| HTML | mit | giorgiofellipe/app-historico-saude,giorgiofellipe/app-historico-saude | html | ## Code Before:
<ion-view view-title="Doenças">
<ion-nav-buttons side="right">
<button class="button button-icon icon ion-ios-search-strong" ng-click="showFilterBar()"></button>
<button class="button button-icon icon"
ng-class="{'ion-android-add': platform.isAndroid(), 'ion-ios-plus-empty': !platform.isAndroid()}"
ng-click="new()"></button>
</ion-nav-buttons>
<ion-content>
<ion-refresher pulling-text="Pull to refresh" on-refresh="refresh(true)"></ion-refresher>
<ion-list>
<ion-item
class="item-remove-animate item-icon-right"
auto-list-divider
auto-list-divider-value='{{doenca.data | date:"dd/MM/yyyy"}}'
auto-list-divider-function="dividerFunction"
ng-repeat="doenca in doencas">
<span class="data">{{doenca.data | date:"h:mm"}}</span>
{{doenca.nome}}
<i class="icon ion-chevron-right icon-accessory"></i>
<ion-option-button class="button-energized" ng-click="edit(doenca)">
Edit
</ion-option-button>
<ion-option-button class="button-assertive" ng-click="remove(doenca)">
Delete
</ion-option-button>
</ion-item>
</ion-list>
</ion-content>
</ion-view>
## Instruction:
Change refresher spinner style to dots
## Code After:
<ion-view view-title="Doenças">
<ion-nav-buttons side="right">
<button class="button button-icon icon ion-ios-search-strong" ng-click="showFilterBar()"></button>
<button class="button button-icon icon"
ng-class="{'ion-android-add': platform.isAndroid(), 'ion-ios-plus-empty': !platform.isAndroid()}"
ng-click="new()"></button>
</ion-nav-buttons>
<ion-content>
<ion-refresher pulling-text="Pull to refresh" on-refresh="refresh(true)" spinner="dots"></ion-refresher>
<ion-list>
<ion-item
class="item-remove-animate item-icon-right"
auto-list-divider
auto-list-divider-value='{{doenca.data | date:"dd/MM/yyyy"}}'
auto-list-divider-function="dividerFunction"
ng-repeat="doenca in doencas">
<span class="data">{{doenca.data | date:"h:mm"}}</span>
{{doenca.nome}}
<i class="icon ion-chevron-right icon-accessory"></i>
<ion-option-button class="button-energized" ng-click="edit(doenca)">
Edit
</ion-option-button>
<ion-option-button class="button-assertive" ng-click="remove(doenca)">
Delete
</ion-option-button>
</ion-item>
</ion-list>
</ion-content>
</ion-view>
| <ion-view view-title="Doenças">
<ion-nav-buttons side="right">
<button class="button button-icon icon ion-ios-search-strong" ng-click="showFilterBar()"></button>
<button class="button button-icon icon"
ng-class="{'ion-android-add': platform.isAndroid(), 'ion-ios-plus-empty': !platform.isAndroid()}"
ng-click="new()"></button>
</ion-nav-buttons>
<ion-content>
- <ion-refresher pulling-text="Pull to refresh" on-refresh="refresh(true)"></ion-refresher>
+ <ion-refresher pulling-text="Pull to refresh" on-refresh="refresh(true)" spinner="dots"></ion-refresher>
? +++++++++++++++
<ion-list>
<ion-item
class="item-remove-animate item-icon-right"
auto-list-divider
auto-list-divider-value='{{doenca.data | date:"dd/MM/yyyy"}}'
auto-list-divider-function="dividerFunction"
ng-repeat="doenca in doencas">
<span class="data">{{doenca.data | date:"h:mm"}}</span>
{{doenca.nome}}
<i class="icon ion-chevron-right icon-accessory"></i>
<ion-option-button class="button-energized" ng-click="edit(doenca)">
Edit
</ion-option-button>
<ion-option-button class="button-assertive" ng-click="remove(doenca)">
Delete
</ion-option-button>
</ion-item>
</ion-list>
</ion-content>
</ion-view> | 2 | 0.068966 | 1 | 1 |
cc7492f013f2416d7e6fd7cb1e15eec155f5822a | home/.emacs.d/lisp/init-yasnippet.el | home/.emacs.d/lisp/init-yasnippet.el | ;;; init-yasnippet.el -- yasnippet setup
;;; Commentary:
;;; yasnippet and custom snippets
;;; Code:
(require-package 'yasnippet)
(require 'yasnippet)
(yas-global-mode 1)
(setq yas-snippet-dirs
'("~/.emacs.d/snippets" "~/code/yasnippet-snippets"))
(diminish 'yas-minor-mode)
(provide 'init-yasnippet)
;;; init-yasnippet.el ends here
| ;;; init-yasnippet.el -- yasnippet setup
;;; Commentary:
;;; yasnippet and custom snippets
;;; Code:
(require-package 'yasnippet)
(require 'yasnippet)
(setq yas-snippet-dirs
'("~/.emacs.d/snippets" "~/code/yasnippet-snippets"))
(yas-global-mode 1)
(define-key yas-keymap (kbd "DEL") 'yas-skip-and-clear-or-delete-char)
(diminish 'yas-minor-mode)
(provide 'init-yasnippet)
;;; init-yasnippet.el ends here
| Fix Emacs YAS backspace in middle of snippet | Fix Emacs YAS backspace in middle of snippet
| Emacs Lisp | mit | cartolari/dotfiles,cartolari/dotfiles,cartolari/dotfiles,cartolari/dotfiles | emacs-lisp | ## Code Before:
;;; init-yasnippet.el -- yasnippet setup
;;; Commentary:
;;; yasnippet and custom snippets
;;; Code:
(require-package 'yasnippet)
(require 'yasnippet)
(yas-global-mode 1)
(setq yas-snippet-dirs
'("~/.emacs.d/snippets" "~/code/yasnippet-snippets"))
(diminish 'yas-minor-mode)
(provide 'init-yasnippet)
;;; init-yasnippet.el ends here
## Instruction:
Fix Emacs YAS backspace in middle of snippet
## Code After:
;;; init-yasnippet.el -- yasnippet setup
;;; Commentary:
;;; yasnippet and custom snippets
;;; Code:
(require-package 'yasnippet)
(require 'yasnippet)
(setq yas-snippet-dirs
'("~/.emacs.d/snippets" "~/code/yasnippet-snippets"))
(yas-global-mode 1)
(define-key yas-keymap (kbd "DEL") 'yas-skip-and-clear-or-delete-char)
(diminish 'yas-minor-mode)
(provide 'init-yasnippet)
;;; init-yasnippet.el ends here
| ;;; init-yasnippet.el -- yasnippet setup
;;; Commentary:
;;; yasnippet and custom snippets
;;; Code:
(require-package 'yasnippet)
(require 'yasnippet)
- (yas-global-mode 1)
-
(setq yas-snippet-dirs
'("~/.emacs.d/snippets" "~/code/yasnippet-snippets"))
+
+ (yas-global-mode 1)
+ (define-key yas-keymap (kbd "DEL") 'yas-skip-and-clear-or-delete-char)
(diminish 'yas-minor-mode)
(provide 'init-yasnippet)
;;; init-yasnippet.el ends here | 5 | 0.263158 | 3 | 2 |
ed5f20bb46ccd9571d1abb965ebc2792e1d0e5db | src/main/java/com/codeski/nbt/Main.java | src/main/java/com/codeski/nbt/Main.java | package com.codeski.nbt;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
public class Main
{
public static void main(String[] args) throws FileNotFoundException, IOException
{
File f = new File(args[0]);
System.out.println("Start reading " + f.getAbsolutePath() + "...");
System.out.println(new NBTReader(f).read());
System.out.println("Finished reading " + f.getAbsolutePath() + "...");
}
}
| package com.codeski.nbt;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import com.codeski.nbt.tags.NBTCompound;
public class Main
{
public static void main(String[] args) throws FileNotFoundException, IOException
{
File f = new File(args[0]);
System.out.println("Start reading " + f.getAbsolutePath() + "...");
NBTCompound root = (NBTCompound) new NBTReader(f).read();
System.out.println("Finished reading " + f.getAbsolutePath() + "...");
System.out.println();
System.out.println("{ " + root.toJSON() + " }");
System.out.println();
System.out.println("<?xml version=\"1.0\" encoding=\"UTF-8\"?>" + root.toXML());
System.out.println();
System.out.println("Done!");
}
}
| Clean up main class and show JSON and XML output | Clean up main class and show JSON and XML output | Java | mit | Codeski/NBT | java | ## Code Before:
package com.codeski.nbt;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
public class Main
{
public static void main(String[] args) throws FileNotFoundException, IOException
{
File f = new File(args[0]);
System.out.println("Start reading " + f.getAbsolutePath() + "...");
System.out.println(new NBTReader(f).read());
System.out.println("Finished reading " + f.getAbsolutePath() + "...");
}
}
## Instruction:
Clean up main class and show JSON and XML output
## Code After:
package com.codeski.nbt;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import com.codeski.nbt.tags.NBTCompound;
public class Main
{
public static void main(String[] args) throws FileNotFoundException, IOException
{
File f = new File(args[0]);
System.out.println("Start reading " + f.getAbsolutePath() + "...");
NBTCompound root = (NBTCompound) new NBTReader(f).read();
System.out.println("Finished reading " + f.getAbsolutePath() + "...");
System.out.println();
System.out.println("{ " + root.toJSON() + " }");
System.out.println();
System.out.println("<?xml version=\"1.0\" encoding=\"UTF-8\"?>" + root.toXML());
System.out.println();
System.out.println("Done!");
}
}
| package com.codeski.nbt;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
+
+ import com.codeski.nbt.tags.NBTCompound;
public class Main
{
public static void main(String[] args) throws FileNotFoundException, IOException
{
File f = new File(args[0]);
System.out.println("Start reading " + f.getAbsolutePath() + "...");
- System.out.println(new NBTReader(f).read());
+ NBTCompound root = (NBTCompound) new NBTReader(f).read();
System.out.println("Finished reading " + f.getAbsolutePath() + "...");
+ System.out.println();
+ System.out.println("{ " + root.toJSON() + " }");
+ System.out.println();
+ System.out.println("<?xml version=\"1.0\" encoding=\"UTF-8\"?>" + root.toXML());
+ System.out.println();
+ System.out.println("Done!");
}
} | 10 | 0.625 | 9 | 1 |
db44a149930ea3c98bb6783fa12c4894c110426d | README.md | README.md | <<<<<<< HEAD
falance
=======
An ongoing finacial app based on Node.js.
[http://app1-yusnet.rhcloud.com]
=======
The OpenShift `nodejs` cartridge documentation can be found at:
https://github.com/openshift/origin-server/tree/master/cartridges/openshift-origin-cartridge-nodejs/README.md
>>>>>>> Creating template
| <<<<<<< HEAD
<<<<<<< HEAD
falance
=======
An ongoing finacial app based on Node.js.
[http://app1-yusnet.rhcloud.com]
=======
The OpenShift `nodejs` cartridge documentation can be found at:
https://github.com/openshift/origin-server/tree/master/cartridges/openshift-origin-cartridge-nodejs/README.md
>>>>>>> Creating template
=======
=======
falance
=======
An ongoing finacial project based on Node.js.
>>>>>>> Add license and gitignore etc.
| Add license and gitignore etc. | Add license and gitignore etc.
| Markdown | apache-2.0 | lany/falance,lany/falance | markdown | ## Code Before:
<<<<<<< HEAD
falance
=======
An ongoing finacial app based on Node.js.
[http://app1-yusnet.rhcloud.com]
=======
The OpenShift `nodejs` cartridge documentation can be found at:
https://github.com/openshift/origin-server/tree/master/cartridges/openshift-origin-cartridge-nodejs/README.md
>>>>>>> Creating template
## Instruction:
Add license and gitignore etc.
## Code After:
<<<<<<< HEAD
<<<<<<< HEAD
falance
=======
An ongoing finacial app based on Node.js.
[http://app1-yusnet.rhcloud.com]
=======
The OpenShift `nodejs` cartridge documentation can be found at:
https://github.com/openshift/origin-server/tree/master/cartridges/openshift-origin-cartridge-nodejs/README.md
>>>>>>> Creating template
=======
=======
falance
=======
An ongoing finacial project based on Node.js.
>>>>>>> Add license and gitignore etc.
| + <<<<<<< HEAD
<<<<<<< HEAD
falance
=======
An ongoing finacial app based on Node.js.
[http://app1-yusnet.rhcloud.com]
=======
The OpenShift `nodejs` cartridge documentation can be found at:
https://github.com/openshift/origin-server/tree/master/cartridges/openshift-origin-cartridge-nodejs/README.md
>>>>>>> Creating template
+ =======
+ =======
+ falance
+ =======
+
+ An ongoing finacial project based on Node.js.
+ >>>>>>> Add license and gitignore etc. | 8 | 0.666667 | 8 | 0 |
4b191ff9e6046f883cd24eaf64e0c42e930b9030 | src/components/Nav.js | src/components/Nav.js | import React from 'react'
const Nav = () => {
return (
<ul>
<div className="navbar-left">
<li><a href="">Home</a></li>
<li><a href="">Households</a></li>
</div>
<div className="navbar-right">
<li><a href="" className="right">Sign Up</a></li>
<li><a href="" className="right">Login</a></li>
</div>
</ul>
)
}
export default Nav | import React from 'react'
import {
Link
} from 'react-router-dom'
const Nav = () => {
return (
<ul>
<div className="navbar-left">
<li><Link to="/">Home</Link></li>
<li><Link to="/households">Households</Link></li>
</div>
<div className="navbar-right">
<li><a href="" className="right">Sign Up</a></li>
<li><a href="" className="right">Login</a></li>
</div>
</ul>
)
}
export default Nav | Update nav links to use Link component | Update nav links to use Link component
| JavaScript | mit | cernanb/personal-chef-react-app,cernanb/personal-chef-react-app | javascript | ## Code Before:
import React from 'react'
const Nav = () => {
return (
<ul>
<div className="navbar-left">
<li><a href="">Home</a></li>
<li><a href="">Households</a></li>
</div>
<div className="navbar-right">
<li><a href="" className="right">Sign Up</a></li>
<li><a href="" className="right">Login</a></li>
</div>
</ul>
)
}
export default Nav
## Instruction:
Update nav links to use Link component
## Code After:
import React from 'react'
import {
Link
} from 'react-router-dom'
const Nav = () => {
return (
<ul>
<div className="navbar-left">
<li><Link to="/">Home</Link></li>
<li><Link to="/households">Households</Link></li>
</div>
<div className="navbar-right">
<li><a href="" className="right">Sign Up</a></li>
<li><a href="" className="right">Login</a></li>
</div>
</ul>
)
}
export default Nav | import React from 'react'
+ import {
+ Link
+ } from 'react-router-dom'
const Nav = () => {
return (
<ul>
<div className="navbar-left">
- <li><a href="">Home</a></li>
? ^ ^^^^ ^
+ <li><Link to="/">Home</Link></li>
? + ^^^^ ^^ + ^^^^
- <li><a href="">Households</a></li>
+ <li><Link to="/households">Households</Link></li>
</div>
<div className="navbar-right">
<li><a href="" className="right">Sign Up</a></li>
<li><a href="" className="right">Login</a></li>
</div>
</ul>
)
}
export default Nav | 7 | 0.388889 | 5 | 2 |
da990bff61c0088f239defac486da1303f97c08a | app/admin/routes.py | app/admin/routes.py | from flask import render_template, redirect, url_for, flash, request
from flask.ext.login import login_required, current_user
from . import admin
from .forms import ProfileForm
from .. import db
from ..models import User
@admin.route('/')
@login_required
def index():
return render_template('admin/user.html', user=current_user)
@admin.route('/edit_user', methods=['GET', 'POST'])
@login_required
def edit_user():
form = ProfileForm()
if form.validate_on_submit():
current_user.name = form.name.data
current_user.location = form.location.data
current_user.bio = form.bio.data
db.session.add(current_user._get_current_object())
db.session.commit()
flash("Síðan hefur verið uppfærð")
return redirect(url_for('admin.index'))
form.name.data = current_user.name
form.location.data = current_user.location
form.bio.data = current_user.bio
return render_template('admin/edit_user.html', form=form)
| from flask import render_template, redirect, url_for, flash, request
from flask.ext.login import login_required, current_user
from . import admin
from .forms import ProfileForm
from .. import db
from ..models import User
@admin.route('/')
@login_required
def index():
return render_template('admin/user.html', user=current_user)
@admin.route('/edit_user', methods=['GET', 'POST'])
@login_required
def edit_user():
form = ProfileForm()
if form.validate_on_submit():
current_user.name = form.name.data
current_user.location = form.location.data
current_user.bio = form.bio.data
db.session.add(current_user._get_current_object())
db.session.commit()
flash("Síðan hefur verið uppfærð")
return redirect(url_for('admin.index'))
form.name.data = current_user.name
form.location.data = current_user.location
form.bio.data = current_user.bio
return render_template('admin/edit_user.html', form=form)
@admin.route('/news')
@login_required
def news():
return render_template('admin/news.html')
| Add a route to admin/news | Add a route to admin/news
| Python | mit | finnurtorfa/aflafrettir.is,finnurtorfa/aflafrettir.is,finnurtorfa/aflafrettir.is,finnurtorfa/aflafrettir.is | python | ## Code Before:
from flask import render_template, redirect, url_for, flash, request
from flask.ext.login import login_required, current_user
from . import admin
from .forms import ProfileForm
from .. import db
from ..models import User
@admin.route('/')
@login_required
def index():
return render_template('admin/user.html', user=current_user)
@admin.route('/edit_user', methods=['GET', 'POST'])
@login_required
def edit_user():
form = ProfileForm()
if form.validate_on_submit():
current_user.name = form.name.data
current_user.location = form.location.data
current_user.bio = form.bio.data
db.session.add(current_user._get_current_object())
db.session.commit()
flash("Síðan hefur verið uppfærð")
return redirect(url_for('admin.index'))
form.name.data = current_user.name
form.location.data = current_user.location
form.bio.data = current_user.bio
return render_template('admin/edit_user.html', form=form)
## Instruction:
Add a route to admin/news
## Code After:
from flask import render_template, redirect, url_for, flash, request
from flask.ext.login import login_required, current_user
from . import admin
from .forms import ProfileForm
from .. import db
from ..models import User
@admin.route('/')
@login_required
def index():
return render_template('admin/user.html', user=current_user)
@admin.route('/edit_user', methods=['GET', 'POST'])
@login_required
def edit_user():
form = ProfileForm()
if form.validate_on_submit():
current_user.name = form.name.data
current_user.location = form.location.data
current_user.bio = form.bio.data
db.session.add(current_user._get_current_object())
db.session.commit()
flash("Síðan hefur verið uppfærð")
return redirect(url_for('admin.index'))
form.name.data = current_user.name
form.location.data = current_user.location
form.bio.data = current_user.bio
return render_template('admin/edit_user.html', form=form)
@admin.route('/news')
@login_required
def news():
return render_template('admin/news.html')
| from flask import render_template, redirect, url_for, flash, request
from flask.ext.login import login_required, current_user
from . import admin
from .forms import ProfileForm
from .. import db
from ..models import User
@admin.route('/')
@login_required
def index():
return render_template('admin/user.html', user=current_user)
@admin.route('/edit_user', methods=['GET', 'POST'])
@login_required
def edit_user():
form = ProfileForm()
if form.validate_on_submit():
current_user.name = form.name.data
current_user.location = form.location.data
current_user.bio = form.bio.data
db.session.add(current_user._get_current_object())
db.session.commit()
flash("Síðan hefur verið uppfærð")
return redirect(url_for('admin.index'))
form.name.data = current_user.name
form.location.data = current_user.location
form.bio.data = current_user.bio
return render_template('admin/edit_user.html', form=form)
+ @admin.route('/news')
+ @login_required
+ def news():
+ return render_template('admin/news.html') | 4 | 0.108108 | 4 | 0 |
4aa38e7112fa47abfd7d62d26b0b2bb5d46d8f23 | .github/workflows/publish-release.yml | .github/workflows/publish-release.yml | name: Publish Release
on:
workflow_dispatch: {}
push:
branches:
- develop
pull_request:
branches:
- develop
jobs:
say_hello:
runs-on: ubuntu-latest
steps:
- run: |
echo "Hello 123!"
echo "- in 234!"
| name: Publish Release
on:
workflow_dispatch: {}
push:
branches:
- main
pull_request:
branches:
- main
jobs:
say_hello:
runs-on: ubuntu-latest
steps:
- run: |
echo "Hello 123!"
echo "- in 234!"
| Change branch name to main. | Change branch name to main.
| YAML | bsd-3-clause | DataBiosphere/terra-cli,DataBiosphere/terra-cli | yaml | ## Code Before:
name: Publish Release
on:
workflow_dispatch: {}
push:
branches:
- develop
pull_request:
branches:
- develop
jobs:
say_hello:
runs-on: ubuntu-latest
steps:
- run: |
echo "Hello 123!"
echo "- in 234!"
## Instruction:
Change branch name to main.
## Code After:
name: Publish Release
on:
workflow_dispatch: {}
push:
branches:
- main
pull_request:
branches:
- main
jobs:
say_hello:
runs-on: ubuntu-latest
steps:
- run: |
echo "Hello 123!"
echo "- in 234!"
| name: Publish Release
on:
workflow_dispatch: {}
push:
branches:
- - develop
+ - main
pull_request:
branches:
- - develop
+ - main
jobs:
say_hello:
runs-on: ubuntu-latest
steps:
- run: |
echo "Hello 123!"
echo "- in 234!" | 4 | 0.235294 | 2 | 2 |
3d7482d7c4bce351fc2401419c8e452dfd1dee85 | src/Schema/Request/SearchAvailability.php | src/Schema/Request/SearchAvailability.php | <?php
namespace MssPhp\Schema\Request;
use JMS\Serializer\Annotation\Type;
use JMS\Serializer\Annotation\XmlList;
class SearchAvailability
{
/**
* @Type("DateTime<'Y-m-d'>")
*/
public $date_from;
/**
* @Type("DateTime<'Y-m-d'>")
*/
public $date_to;
/**
* @Type("array<integer>")
* @XmlList(inline = true, entry = "offer_id")
*/
public $offer_id;
/**
* @Type("array<integer>")
* @XmlList(inline = true, entry = "room_id")
*/
public $room_id;
}
| <?php
namespace MssPhp\Schema\Request;
use JMS\Serializer\Annotation\Type;
use JMS\Serializer\Annotation\XmlList;
class SearchAvailability
{
/**
* @Type("DateTime<'Y-m-d'>")
*/
public $date_from;
/**
* @Type("DateTime<'Y-m-d'>")
*/
public $date_to;
/**
* @Type("array<integer>")
* @XmlList(inline = true, entry = "offer_id")
*/
public $offer_id;
/**
* @Type("array<integer>")
* @XmlList(inline = true, entry = "room_id")
*/
public $room_id;
/**
* @Type("integer")
*/
public $typ;
}
| Add new search availability `typ` filter | Add new search availability `typ` filter
| PHP | mit | HGV/mss-php | php | ## Code Before:
<?php
namespace MssPhp\Schema\Request;
use JMS\Serializer\Annotation\Type;
use JMS\Serializer\Annotation\XmlList;
class SearchAvailability
{
/**
* @Type("DateTime<'Y-m-d'>")
*/
public $date_from;
/**
* @Type("DateTime<'Y-m-d'>")
*/
public $date_to;
/**
* @Type("array<integer>")
* @XmlList(inline = true, entry = "offer_id")
*/
public $offer_id;
/**
* @Type("array<integer>")
* @XmlList(inline = true, entry = "room_id")
*/
public $room_id;
}
## Instruction:
Add new search availability `typ` filter
## Code After:
<?php
namespace MssPhp\Schema\Request;
use JMS\Serializer\Annotation\Type;
use JMS\Serializer\Annotation\XmlList;
class SearchAvailability
{
/**
* @Type("DateTime<'Y-m-d'>")
*/
public $date_from;
/**
* @Type("DateTime<'Y-m-d'>")
*/
public $date_to;
/**
* @Type("array<integer>")
* @XmlList(inline = true, entry = "offer_id")
*/
public $offer_id;
/**
* @Type("array<integer>")
* @XmlList(inline = true, entry = "room_id")
*/
public $room_id;
/**
* @Type("integer")
*/
public $typ;
}
| <?php
namespace MssPhp\Schema\Request;
use JMS\Serializer\Annotation\Type;
use JMS\Serializer\Annotation\XmlList;
class SearchAvailability
{
/**
* @Type("DateTime<'Y-m-d'>")
*/
public $date_from;
/**
* @Type("DateTime<'Y-m-d'>")
*/
public $date_to;
/**
* @Type("array<integer>")
* @XmlList(inline = true, entry = "offer_id")
*/
public $offer_id;
/**
* @Type("array<integer>")
* @XmlList(inline = true, entry = "room_id")
*/
public $room_id;
+
+ /**
+ * @Type("integer")
+ */
+ public $typ;
} | 5 | 0.16129 | 5 | 0 |
26e4dbb009a987477a48825e947be1bbbd135743 | exercises/matrix/metadata.yml | exercises/matrix/metadata.yml | ---
blurb: "Write a program that, given a string representing a matrix of numbers, can return the rows and columns of that matrix."
source: "Warmup to the `saddle-points` warmup."
source_url: "http://jumpstartlab.com"
| ---
blurb: "Given a string representing a matrix of numbers, return the rows and columns of that matrix."
source: "Warmup to the `saddle-points` warmup."
source_url: "http://jumpstartlab.com"
| Remove "write a program" from matrix exercise | Remove "write a program" from matrix exercise
| YAML | mit | kgengler/x-common,jmluy/x-common,exercism/x-common,jmluy/x-common,Vankog/problem-specifications,ErikSchierboom/x-common,petertseng/x-common,petertseng/x-common,ErikSchierboom/x-common,rpottsoh/x-common,rpottsoh/x-common,jmluy/x-common,Vankog/problem-specifications,exercism/x-common,kgengler/x-common | yaml | ## Code Before:
---
blurb: "Write a program that, given a string representing a matrix of numbers, can return the rows and columns of that matrix."
source: "Warmup to the `saddle-points` warmup."
source_url: "http://jumpstartlab.com"
## Instruction:
Remove "write a program" from matrix exercise
## Code After:
---
blurb: "Given a string representing a matrix of numbers, return the rows and columns of that matrix."
source: "Warmup to the `saddle-points` warmup."
source_url: "http://jumpstartlab.com"
| ---
- blurb: "Write a program that, given a string representing a matrix of numbers, can return the rows and columns of that matrix."
? ^^^^^^^^^^^^^^^^^^^^^^^ ----
+ blurb: "Given a string representing a matrix of numbers, return the rows and columns of that matrix."
? ^
source: "Warmup to the `saddle-points` warmup."
source_url: "http://jumpstartlab.com" | 2 | 0.5 | 1 | 1 |
d1aa094c9b0988c12100c8300aae4b390bb276f8 | zonetruck/__main__.py | zonetruck/__main__.py | import yaml
from zonetruck.WorkManager import WorkManager
from zonetruck.ZoneUpdater import ZoneUpdater
from zonetruck.ZoneFilter import ZoneFilter
from zonetruck.zone_xfer import zone_xfer
def main(argv):
config = yaml.safe_load(open(argv[1], 'r'))
zone_filter = ZoneFilter(config['filter_rules']).filter
zone_updaters = [ZoneUpdater(**o).task for o in config['outputs']]
subsequent_tasks = [[zone_filter], zone_updaters]
work_manager = WorkManager()
for source in config['sources']:
for zone in source['zones']:
work_manager.submit_work(100, zone_xfer, (source['masters'], zone), subsequent_tasks)
work_manager.start()
work_manager.join()
if __name__ == '__main__':
import sys
main(sys.argv) | import yaml
from zonetruck.WorkManager import WorkManager
from zonetruck.ZoneUpdater import ZoneUpdater
from zonetruck.ZoneFilter import ZoneFilter
from zonetruck.zone_xfer import zone_xfer
import sys
def main(argv=None):
argv = argv or sys.argv
config = yaml.safe_load(open(argv[1], 'r'))
zone_filter = ZoneFilter(config['filter_rules']).filter
zone_updaters = [ZoneUpdater(**o).task for o in config['outputs']]
subsequent_tasks = [[zone_filter], zone_updaters]
work_manager = WorkManager()
for source in config['sources']:
for zone in source['zones']:
work_manager.submit_work(100, zone_xfer, (source['masters'], zone), subsequent_tasks)
work_manager.start()
work_manager.join()
if __name__ == '__main__':
main() | Make argv arguments optional, fixes pip installed script | Make argv arguments optional, fixes pip installed script
| Python | mit | pv2b/zonetruck | python | ## Code Before:
import yaml
from zonetruck.WorkManager import WorkManager
from zonetruck.ZoneUpdater import ZoneUpdater
from zonetruck.ZoneFilter import ZoneFilter
from zonetruck.zone_xfer import zone_xfer
def main(argv):
config = yaml.safe_load(open(argv[1], 'r'))
zone_filter = ZoneFilter(config['filter_rules']).filter
zone_updaters = [ZoneUpdater(**o).task for o in config['outputs']]
subsequent_tasks = [[zone_filter], zone_updaters]
work_manager = WorkManager()
for source in config['sources']:
for zone in source['zones']:
work_manager.submit_work(100, zone_xfer, (source['masters'], zone), subsequent_tasks)
work_manager.start()
work_manager.join()
if __name__ == '__main__':
import sys
main(sys.argv)
## Instruction:
Make argv arguments optional, fixes pip installed script
## Code After:
import yaml
from zonetruck.WorkManager import WorkManager
from zonetruck.ZoneUpdater import ZoneUpdater
from zonetruck.ZoneFilter import ZoneFilter
from zonetruck.zone_xfer import zone_xfer
import sys
def main(argv=None):
argv = argv or sys.argv
config = yaml.safe_load(open(argv[1], 'r'))
zone_filter = ZoneFilter(config['filter_rules']).filter
zone_updaters = [ZoneUpdater(**o).task for o in config['outputs']]
subsequent_tasks = [[zone_filter], zone_updaters]
work_manager = WorkManager()
for source in config['sources']:
for zone in source['zones']:
work_manager.submit_work(100, zone_xfer, (source['masters'], zone), subsequent_tasks)
work_manager.start()
work_manager.join()
if __name__ == '__main__':
main() | import yaml
from zonetruck.WorkManager import WorkManager
from zonetruck.ZoneUpdater import ZoneUpdater
from zonetruck.ZoneFilter import ZoneFilter
from zonetruck.zone_xfer import zone_xfer
+ import sys
- def main(argv):
+ def main(argv=None):
? +++++
+ argv = argv or sys.argv
config = yaml.safe_load(open(argv[1], 'r'))
zone_filter = ZoneFilter(config['filter_rules']).filter
zone_updaters = [ZoneUpdater(**o).task for o in config['outputs']]
subsequent_tasks = [[zone_filter], zone_updaters]
work_manager = WorkManager()
for source in config['sources']:
for zone in source['zones']:
work_manager.submit_work(100, zone_xfer, (source['masters'], zone), subsequent_tasks)
work_manager.start()
work_manager.join()
if __name__ == '__main__':
+ main()
- import sys
- main(sys.argv) | 7 | 0.269231 | 4 | 3 |
e7759b4bae27de4a5bc4e3226287279bf64dfb5f | core/dbt/task/clean.py | core/dbt/task/clean.py | import os.path
import os
import shutil
from dbt.task.base import ProjectOnlyTask
from dbt.logger import GLOBAL_LOGGER as logger
class CleanTask(ProjectOnlyTask):
def __is_project_path(self, path):
proj_path = os.path.abspath('.')
return not os.path.commonprefix(
[proj_path, os.path.abspath(path)]
) == proj_path
def __is_protected_path(self, path):
"""
This function identifies protected paths, so as not to clean them.
"""
abs_path = os.path.abspath(path)
protected_paths = self.config.source_paths + \
self.config.test_paths + ['.']
protected_abs_paths = [os.path.abspath for p in protected_paths]
return abs_path in set(protected_abs_paths) or \
self.__is_project_path(abs_path)
def run(self):
"""
This function takes all the paths in the target file
and cleans the project paths that are not protected.
"""
for path in self.config.clean_targets:
logger.info("Checking {}/*".format(path))
if not self.__is_protected_path(path):
shutil.rmtree(path, True)
logger.info(" Cleaned {}/*".format(path))
else:
logger.info("{}/* cannot be cleaned".format(path))
logger.info("Finished cleaning all paths.")
| import os.path
import os
import shutil
from dbt.task.base import ProjectOnlyTask
from dbt.logger import GLOBAL_LOGGER as logger
class CleanTask(ProjectOnlyTask):
def __is_project_path(self, path):
proj_path = os.path.abspath('.')
return not os.path.commonprefix(
[proj_path, os.path.abspath(path)]
) == proj_path
def __is_protected_path(self, path):
"""
This function identifies protected paths, so as not to clean them.
"""
abs_path = os.path.abspath(path)
protected_paths = self.config.source_paths + \
self.config.test_paths + ['.']
protected_abs_paths = [os.path.abspath for p in protected_paths]
return abs_path in set(protected_abs_paths) or \
self.__is_project_path(abs_path)
def run(self):
"""
This function takes all the paths in the target file
and cleans the project paths that are not protected.
"""
for path in self.config.clean_targets:
logger.info("Checking {}/*".format(path))
if not self.__is_protected_path(path):
shutil.rmtree(path, True)
logger.info(" Cleaned {}/*".format(path))
else:
logger.info("ERROR: not cleaning {}/* because it is protected".format(path))
logger.info("Finished cleaning all paths.")
| Update error message with error warning | Update error message with error warning | Python | apache-2.0 | analyst-collective/dbt,fishtown-analytics/dbt,fishtown-analytics/dbt,fishtown-analytics/dbt,analyst-collective/dbt | python | ## Code Before:
import os.path
import os
import shutil
from dbt.task.base import ProjectOnlyTask
from dbt.logger import GLOBAL_LOGGER as logger
class CleanTask(ProjectOnlyTask):
def __is_project_path(self, path):
proj_path = os.path.abspath('.')
return not os.path.commonprefix(
[proj_path, os.path.abspath(path)]
) == proj_path
def __is_protected_path(self, path):
"""
This function identifies protected paths, so as not to clean them.
"""
abs_path = os.path.abspath(path)
protected_paths = self.config.source_paths + \
self.config.test_paths + ['.']
protected_abs_paths = [os.path.abspath for p in protected_paths]
return abs_path in set(protected_abs_paths) or \
self.__is_project_path(abs_path)
def run(self):
"""
This function takes all the paths in the target file
and cleans the project paths that are not protected.
"""
for path in self.config.clean_targets:
logger.info("Checking {}/*".format(path))
if not self.__is_protected_path(path):
shutil.rmtree(path, True)
logger.info(" Cleaned {}/*".format(path))
else:
logger.info("{}/* cannot be cleaned".format(path))
logger.info("Finished cleaning all paths.")
## Instruction:
Update error message with error warning
## Code After:
import os.path
import os
import shutil
from dbt.task.base import ProjectOnlyTask
from dbt.logger import GLOBAL_LOGGER as logger
class CleanTask(ProjectOnlyTask):
def __is_project_path(self, path):
proj_path = os.path.abspath('.')
return not os.path.commonprefix(
[proj_path, os.path.abspath(path)]
) == proj_path
def __is_protected_path(self, path):
"""
This function identifies protected paths, so as not to clean them.
"""
abs_path = os.path.abspath(path)
protected_paths = self.config.source_paths + \
self.config.test_paths + ['.']
protected_abs_paths = [os.path.abspath for p in protected_paths]
return abs_path in set(protected_abs_paths) or \
self.__is_project_path(abs_path)
def run(self):
"""
This function takes all the paths in the target file
and cleans the project paths that are not protected.
"""
for path in self.config.clean_targets:
logger.info("Checking {}/*".format(path))
if not self.__is_protected_path(path):
shutil.rmtree(path, True)
logger.info(" Cleaned {}/*".format(path))
else:
logger.info("ERROR: not cleaning {}/* because it is protected".format(path))
logger.info("Finished cleaning all paths.")
| import os.path
import os
import shutil
from dbt.task.base import ProjectOnlyTask
from dbt.logger import GLOBAL_LOGGER as logger
class CleanTask(ProjectOnlyTask):
def __is_project_path(self, path):
proj_path = os.path.abspath('.')
return not os.path.commonprefix(
[proj_path, os.path.abspath(path)]
) == proj_path
def __is_protected_path(self, path):
"""
This function identifies protected paths, so as not to clean them.
"""
abs_path = os.path.abspath(path)
protected_paths = self.config.source_paths + \
self.config.test_paths + ['.']
protected_abs_paths = [os.path.abspath for p in protected_paths]
return abs_path in set(protected_abs_paths) or \
self.__is_project_path(abs_path)
def run(self):
"""
This function takes all the paths in the target file
and cleans the project paths that are not protected.
"""
for path in self.config.clean_targets:
logger.info("Checking {}/*".format(path))
if not self.__is_protected_path(path):
shutil.rmtree(path, True)
logger.info(" Cleaned {}/*".format(path))
else:
- logger.info("{}/* cannot be cleaned".format(path))
+ logger.info("ERROR: not cleaning {}/* because it is protected".format(path))
logger.info("Finished cleaning all paths.") | 2 | 0.05 | 1 | 1 |
dd65abfa7e98a4180bf0e6cf8206ce907b6badb7 | doc/update-from-v4-to-v5.md | doc/update-from-v4-to-v5.md |
sudo service gitlab-ci-runner stop
## 2. Get recent code
sudo su gitlab_ci_runner
cd ~/gitlab-ci-runner
git fetch origin
git checkout 5-0-stable
## 3. Start Runner
sudo service gitlab-ci-runner start
|
sudo service gitlab-ci-runner stop
## 2. Get recent code
sudo su gitlab_ci_runner
cd ~/gitlab-ci-runner
git fetch origin
git checkout 5-0-stable
bundle install --deployment
## 3. Start Runner
sudo service gitlab-ci-runner start | Add missing command to update guide | Add missing command to update guide
| Markdown | mit | cui-liqiang/gitlab-ci-runner,mfittko/gitlab-ci-runner,cui-liqiang/gitlab-ci-runner,gitlabhq/gitlab-ci-runner,mfittko/gitlab-ci-runner,andrewmunsell/gitlab-ci-runner,andrewmunsell/gitlab-ci-runner,gitlabhq/gitlab-ci-runner | markdown | ## Code Before:
sudo service gitlab-ci-runner stop
## 2. Get recent code
sudo su gitlab_ci_runner
cd ~/gitlab-ci-runner
git fetch origin
git checkout 5-0-stable
## 3. Start Runner
sudo service gitlab-ci-runner start
## Instruction:
Add missing command to update guide
## Code After:
sudo service gitlab-ci-runner stop
## 2. Get recent code
sudo su gitlab_ci_runner
cd ~/gitlab-ci-runner
git fetch origin
git checkout 5-0-stable
bundle install --deployment
## 3. Start Runner
sudo service gitlab-ci-runner start |
sudo service gitlab-ci-runner stop
## 2. Get recent code
sudo su gitlab_ci_runner
cd ~/gitlab-ci-runner
git fetch origin
git checkout 5-0-stable
+ bundle install --deployment
## 3. Start Runner
sudo service gitlab-ci-runner start | 1 | 0.076923 | 1 | 0 |
5e804fc75a18e24ab8c70f94a18d169a5ca40086 | src/Routers/BaseRouter.php | src/Routers/BaseRouter.php | <?php
namespace SebastiaanLuca\Router\Routers;
use Illuminate\Contracts\Routing\Registrar as RegistrarContract;
/**
* Class BaseRouter
*
* The base class every router should extend.
*
* @package SebastiaanLuca\Router\Routers
*/
abstract class BaseRouter implements RouterInterface
{
/**
* The routing instance.
*
* @var \SebastiaanLuca\Router\ExtendedRouter|\Illuminate\Routing\Router
*/
protected $router;
/**
* The default controller namespace.
*
* @var string
*/
protected $namespace = '';
/**
* BaseRouter constructor.
*
* @param \Illuminate\Contracts\Routing\Registrar $router
*/
public function __construct(RegistrarContract $router)
{
$this->router = $router;
$this->map();
}
/**
* Get the default namespace with the suffix attached.
*
* @param string|null $suffix
*
* @return string
*/
public function getNamespace($suffix = null)
{
if (! $suffix) {
return $this->namespace;
}
return $this->namespace . '\\' . $suffix;
}
/**
* Map the routes.
*/
public abstract function map();
}
| <?php
namespace SebastiaanLuca\Router\Routers;
use Illuminate\Contracts\Routing\Registrar as RegistrarContract;
/**
* Class BaseRouter
*
* The base class every router should extend.
*
* @package SebastiaanLuca\Router\Routers
*/
abstract class BaseRouter implements RouterInterface
{
/**
* The routing instance.
*
* @var \SebastiaanLuca\Router\ExtendedRouter|\Illuminate\Routing\Router
*/
protected $router;
/**
* The Dingo API router.
*
* @var \Dingo\Api\Routing\Router
*/
protected $api;
/**
* The default controller namespace.
*
* @var string
*/
protected $namespace = '';
/**
* BaseRouter constructor.
*
* @param \Illuminate\Contracts\Routing\Registrar $router
*/
public function __construct(RegistrarContract $router)
{
$this->router = $router;
$this->setUpApiRouter();
$this->map();
}
/**
* Assign the API router if the Dingo API package is installed.
*/
protected function setUpApiRouter()
{
if (class_exists('\Dingo\Api\Routing\Router')) {
$this->api = app('\Dingo\Api\Routing\Router');
}
}
/**
* Get the default namespace with the suffix attached.
*
* @param string|null $suffix
*
* @return string
*/
public function getNamespace($suffix = null)
{
if (! $suffix) {
return $this->namespace;
}
return $this->namespace . '\\' . $suffix;
}
/**
* Map the routes.
*/
public abstract function map();
}
| Add a feature to detected use of the Dingo API package and assign its router for ease-of-use | Add a feature to detected use of the Dingo API package and assign its router for ease-of-use
| PHP | mit | sebastiaanluca/laravel-router | php | ## Code Before:
<?php
namespace SebastiaanLuca\Router\Routers;
use Illuminate\Contracts\Routing\Registrar as RegistrarContract;
/**
* Class BaseRouter
*
* The base class every router should extend.
*
* @package SebastiaanLuca\Router\Routers
*/
abstract class BaseRouter implements RouterInterface
{
/**
* The routing instance.
*
* @var \SebastiaanLuca\Router\ExtendedRouter|\Illuminate\Routing\Router
*/
protected $router;
/**
* The default controller namespace.
*
* @var string
*/
protected $namespace = '';
/**
* BaseRouter constructor.
*
* @param \Illuminate\Contracts\Routing\Registrar $router
*/
public function __construct(RegistrarContract $router)
{
$this->router = $router;
$this->map();
}
/**
* Get the default namespace with the suffix attached.
*
* @param string|null $suffix
*
* @return string
*/
public function getNamespace($suffix = null)
{
if (! $suffix) {
return $this->namespace;
}
return $this->namespace . '\\' . $suffix;
}
/**
* Map the routes.
*/
public abstract function map();
}
## Instruction:
Add a feature to detected use of the Dingo API package and assign its router for ease-of-use
## Code After:
<?php
namespace SebastiaanLuca\Router\Routers;
use Illuminate\Contracts\Routing\Registrar as RegistrarContract;
/**
* Class BaseRouter
*
* The base class every router should extend.
*
* @package SebastiaanLuca\Router\Routers
*/
abstract class BaseRouter implements RouterInterface
{
/**
* The routing instance.
*
* @var \SebastiaanLuca\Router\ExtendedRouter|\Illuminate\Routing\Router
*/
protected $router;
/**
* The Dingo API router.
*
* @var \Dingo\Api\Routing\Router
*/
protected $api;
/**
* The default controller namespace.
*
* @var string
*/
protected $namespace = '';
/**
* BaseRouter constructor.
*
* @param \Illuminate\Contracts\Routing\Registrar $router
*/
public function __construct(RegistrarContract $router)
{
$this->router = $router;
$this->setUpApiRouter();
$this->map();
}
/**
* Assign the API router if the Dingo API package is installed.
*/
protected function setUpApiRouter()
{
if (class_exists('\Dingo\Api\Routing\Router')) {
$this->api = app('\Dingo\Api\Routing\Router');
}
}
/**
* Get the default namespace with the suffix attached.
*
* @param string|null $suffix
*
* @return string
*/
public function getNamespace($suffix = null)
{
if (! $suffix) {
return $this->namespace;
}
return $this->namespace . '\\' . $suffix;
}
/**
* Map the routes.
*/
public abstract function map();
}
| <?php
namespace SebastiaanLuca\Router\Routers;
use Illuminate\Contracts\Routing\Registrar as RegistrarContract;
/**
* Class BaseRouter
*
* The base class every router should extend.
*
* @package SebastiaanLuca\Router\Routers
*/
abstract class BaseRouter implements RouterInterface
{
/**
* The routing instance.
*
* @var \SebastiaanLuca\Router\ExtendedRouter|\Illuminate\Routing\Router
*/
protected $router;
/**
+ * The Dingo API router.
+ *
+ * @var \Dingo\Api\Routing\Router
+ */
+ protected $api;
+
+ /**
* The default controller namespace.
*
* @var string
*/
protected $namespace = '';
/**
* BaseRouter constructor.
*
* @param \Illuminate\Contracts\Routing\Registrar $router
*/
public function __construct(RegistrarContract $router)
{
$this->router = $router;
+ $this->setUpApiRouter();
+
$this->map();
+ }
+
+
+
+ /**
+ * Assign the API router if the Dingo API package is installed.
+ */
+ protected function setUpApiRouter()
+ {
+ if (class_exists('\Dingo\Api\Routing\Router')) {
+ $this->api = app('\Dingo\Api\Routing\Router');
+ }
}
/**
* Get the default namespace with the suffix attached.
*
* @param string|null $suffix
*
* @return string
*/
public function getNamespace($suffix = null)
{
if (! $suffix) {
return $this->namespace;
}
return $this->namespace . '\\' . $suffix;
}
/**
* Map the routes.
*/
public abstract function map();
} | 21 | 0.308824 | 21 | 0 |
ba104e31d3576e661c8b8db1b7e9ca5505002430 | demo/tree.ur | demo/tree.ur | sequence s
table t : { Id : int, Parent : option int, Nam : string }
PRIMARY KEY Id
open TreeFun.Make(struct
val tab = t
end)
fun row r = <xml>
#{[r.Id]}: {[r.Nam]} <a link={del r.Id}>[Delete]</a>
<form>
Add child: <textbox{#Nam}/> <submit action={add (Some r.Id)}/>
</form>
</xml>
and main () =
xml <- tree row None;
return <xml><body>
{xml}
<form>
Add a top-level node: <textbox{#Nam}/> <submit action={add None}/>
</form>
</body></xml>
and add parent r =
id <- nextval s;
dml (INSERT INTO t (Id, Parent, Nam) VALUES ({[id]}, {[parent]}, {[r.Nam]}));
main ()
and del id =
dml (DELETE FROM t WHERE Id = {[id]});
main ()
| sequence s
table t : { Id : int, Parent : option int, Nam : string }
PRIMARY KEY Id,
CONSTRAINT F FOREIGN KEY Parent REFERENCES t (Id) ON DELETE CASCADE
open TreeFun.Make(struct
val tab = t
end)
fun row r = <xml>
#{[r.Id]}: {[r.Nam]} <a link={del r.Id}>[Delete]</a>
<form>
Add child: <textbox{#Nam}/> <submit action={add (Some r.Id)}/>
</form>
</xml>
and main () =
xml <- tree row None;
return <xml><body>
{xml}
<form>
Add a top-level node: <textbox{#Nam}/> <submit action={add None}/>
</form>
</body></xml>
and add parent r =
id <- nextval s;
dml (INSERT INTO t (Id, Parent, Nam) VALUES ({[id]}, {[parent]}, {[r.Nam]}));
main ()
and del id =
dml (DELETE FROM t WHERE Id = {[id]});
main ()
| Use FOREIGN KEY in Tree demo | Use FOREIGN KEY in Tree demo
| UrWeb | bsd-3-clause | urweb/debian-urweb,thinkpad20/urweb,urweb/debian-urweb,urweb/debian-urweb,urweb/debian-urweb,thinkpad20/urweb,thinkpad20/urweb,urweb/debian-urweb | urweb | ## Code Before:
sequence s
table t : { Id : int, Parent : option int, Nam : string }
PRIMARY KEY Id
open TreeFun.Make(struct
val tab = t
end)
fun row r = <xml>
#{[r.Id]}: {[r.Nam]} <a link={del r.Id}>[Delete]</a>
<form>
Add child: <textbox{#Nam}/> <submit action={add (Some r.Id)}/>
</form>
</xml>
and main () =
xml <- tree row None;
return <xml><body>
{xml}
<form>
Add a top-level node: <textbox{#Nam}/> <submit action={add None}/>
</form>
</body></xml>
and add parent r =
id <- nextval s;
dml (INSERT INTO t (Id, Parent, Nam) VALUES ({[id]}, {[parent]}, {[r.Nam]}));
main ()
and del id =
dml (DELETE FROM t WHERE Id = {[id]});
main ()
## Instruction:
Use FOREIGN KEY in Tree demo
## Code After:
sequence s
table t : { Id : int, Parent : option int, Nam : string }
PRIMARY KEY Id,
CONSTRAINT F FOREIGN KEY Parent REFERENCES t (Id) ON DELETE CASCADE
open TreeFun.Make(struct
val tab = t
end)
fun row r = <xml>
#{[r.Id]}: {[r.Nam]} <a link={del r.Id}>[Delete]</a>
<form>
Add child: <textbox{#Nam}/> <submit action={add (Some r.Id)}/>
</form>
</xml>
and main () =
xml <- tree row None;
return <xml><body>
{xml}
<form>
Add a top-level node: <textbox{#Nam}/> <submit action={add None}/>
</form>
</body></xml>
and add parent r =
id <- nextval s;
dml (INSERT INTO t (Id, Parent, Nam) VALUES ({[id]}, {[parent]}, {[r.Nam]}));
main ()
and del id =
dml (DELETE FROM t WHERE Id = {[id]});
main ()
| sequence s
table t : { Id : int, Parent : option int, Nam : string }
- PRIMARY KEY Id
+ PRIMARY KEY Id,
? +
+ CONSTRAINT F FOREIGN KEY Parent REFERENCES t (Id) ON DELETE CASCADE
open TreeFun.Make(struct
val tab = t
end)
fun row r = <xml>
#{[r.Id]}: {[r.Nam]} <a link={del r.Id}>[Delete]</a>
<form>
Add child: <textbox{#Nam}/> <submit action={add (Some r.Id)}/>
</form>
</xml>
and main () =
xml <- tree row None;
return <xml><body>
{xml}
<form>
Add a top-level node: <textbox{#Nam}/> <submit action={add None}/>
</form>
</body></xml>
and add parent r =
id <- nextval s;
dml (INSERT INTO t (Id, Parent, Nam) VALUES ({[id]}, {[parent]}, {[r.Nam]}));
main ()
and del id =
dml (DELETE FROM t WHERE Id = {[id]});
main () | 3 | 0.088235 | 2 | 1 |
fe0eee2d9d3a6b807153a26f9634df58e571cb23 | README.md | README.md |
Just Another Movies List App.
<table>
<tbody>
<tr>
<th>App Version</th>
<td>v1.0.0</td>
</tr>
<tr>
<th>Meteor Version</th>
<td>v1.4.4.1</td>
</tr>
</tbody>
</table>
##
### Up and running steps:
1. install meteorjs from [here](https://www.meteor.com/install)
2. execute these from your terminal:
```
$ meteor npm install
$ meteor npm run start
```
3: open your browser to http://localhost:3000
##
### Coming soon features:
* REST api endpoints
* MobX UI states
|
Just Another Movies List App.
<table>
<tbody>
<tr>
<th>App Version</th>
<td>v1.0.0</td>
</tr>
<tr>
<th>Meteor Chef Base Version</th>
<td>v4.15.0</td>
</tr>
<tr>
<th>Meteor Version</th>
<td>v1.4.4.1</td>
</tr>
</tbody>
</table>
##
### Up and running steps:
1. install meteorjs from [here](https://www.meteor.com/install)
2. execute these from your terminal:
```
$ meteor npm install
$ meteor npm run start
```
3: open your browser to http://localhost:3000
##
### Coming soon features:
* REST api endpoints
* MobX UI states
| Add meteor chef base version info | Add meteor chef base version info
| Markdown | mit | zarazi/movies-listie,zarazi/movies-listie | markdown | ## Code Before:
Just Another Movies List App.
<table>
<tbody>
<tr>
<th>App Version</th>
<td>v1.0.0</td>
</tr>
<tr>
<th>Meteor Version</th>
<td>v1.4.4.1</td>
</tr>
</tbody>
</table>
##
### Up and running steps:
1. install meteorjs from [here](https://www.meteor.com/install)
2. execute these from your terminal:
```
$ meteor npm install
$ meteor npm run start
```
3: open your browser to http://localhost:3000
##
### Coming soon features:
* REST api endpoints
* MobX UI states
## Instruction:
Add meteor chef base version info
## Code After:
Just Another Movies List App.
<table>
<tbody>
<tr>
<th>App Version</th>
<td>v1.0.0</td>
</tr>
<tr>
<th>Meteor Chef Base Version</th>
<td>v4.15.0</td>
</tr>
<tr>
<th>Meteor Version</th>
<td>v1.4.4.1</td>
</tr>
</tbody>
</table>
##
### Up and running steps:
1. install meteorjs from [here](https://www.meteor.com/install)
2. execute these from your terminal:
```
$ meteor npm install
$ meteor npm run start
```
3: open your browser to http://localhost:3000
##
### Coming soon features:
* REST api endpoints
* MobX UI states
|
Just Another Movies List App.
<table>
<tbody>
<tr>
<th>App Version</th>
<td>v1.0.0</td>
+ </tr>
+ <tr>
+ <th>Meteor Chef Base Version</th>
+ <td>v4.15.0</td>
</tr>
<tr>
<th>Meteor Version</th>
<td>v1.4.4.1</td>
</tr>
</tbody>
</table>
##
### Up and running steps:
1. install meteorjs from [here](https://www.meteor.com/install)
2. execute these from your terminal:
```
$ meteor npm install
$ meteor npm run start
```
3: open your browser to http://localhost:3000
##
### Coming soon features:
* REST api endpoints
* MobX UI states
| 4 | 0.125 | 4 | 0 |
28956f188fce9f5e2618f84d6d4af7fbbf629383 | project/frontend/src/containers/LandingPage/LandingPage.js | project/frontend/src/containers/LandingPage/LandingPage.js | import React from "react";
import classes from "./LandingPage.module.css";
import LoginButtonSet from "../../components/UI/LoginButtonSet/LoginButtonSet";
const LandingPage = (props) => {
return (
<div className={classes.LandingPageContent}>
<div className={classes.Hero}>
<h1>Who's the next trendsetter?</h1>
<h3>
Something something a description of what the game does, what benefits
it has. Something something ...
</h3>
<LoginButtonSet></LoginButtonSet>
</div>
<div className={classes.IllustrationContainer}></div>
</div>
);
};
export default LandingPage;
| import React from "react";
import classes from "./LandingPage.module.css";
import LoginButtonSet from "../../components/UI/LoginButtonSet/LoginButtonSet";
import Illustration from "../../assets/landing_illustration.png";
const LandingPage = (props) => {
return (
<div className={classes.LandingPageContent}>
<div className={classes.Hero}>
<h1>Who's the next trendsetter?</h1>
<h3>
Something something a description of what the game does, what benefits
it has. Something something ...
</h3>
<LoginButtonSet></LoginButtonSet>
</div>
<div className={classes.IllustrationContainer}>
<img src={Illustration} className={classes.Logo} />
</div>
</div>
);
};
export default LandingPage;
| Add illustration to landing page | Add illustration to landing page
| JavaScript | apache-2.0 | googleinterns/sgonks,googleinterns/sgonks,googleinterns/sgonks,googleinterns/sgonks | javascript | ## Code Before:
import React from "react";
import classes from "./LandingPage.module.css";
import LoginButtonSet from "../../components/UI/LoginButtonSet/LoginButtonSet";
const LandingPage = (props) => {
return (
<div className={classes.LandingPageContent}>
<div className={classes.Hero}>
<h1>Who's the next trendsetter?</h1>
<h3>
Something something a description of what the game does, what benefits
it has. Something something ...
</h3>
<LoginButtonSet></LoginButtonSet>
</div>
<div className={classes.IllustrationContainer}></div>
</div>
);
};
export default LandingPage;
## Instruction:
Add illustration to landing page
## Code After:
import React from "react";
import classes from "./LandingPage.module.css";
import LoginButtonSet from "../../components/UI/LoginButtonSet/LoginButtonSet";
import Illustration from "../../assets/landing_illustration.png";
const LandingPage = (props) => {
return (
<div className={classes.LandingPageContent}>
<div className={classes.Hero}>
<h1>Who's the next trendsetter?</h1>
<h3>
Something something a description of what the game does, what benefits
it has. Something something ...
</h3>
<LoginButtonSet></LoginButtonSet>
</div>
<div className={classes.IllustrationContainer}>
<img src={Illustration} className={classes.Logo} />
</div>
</div>
);
};
export default LandingPage;
| import React from "react";
import classes from "./LandingPage.module.css";
import LoginButtonSet from "../../components/UI/LoginButtonSet/LoginButtonSet";
+ import Illustration from "../../assets/landing_illustration.png";
const LandingPage = (props) => {
return (
<div className={classes.LandingPageContent}>
<div className={classes.Hero}>
<h1>Who's the next trendsetter?</h1>
<h3>
Something something a description of what the game does, what benefits
it has. Something something ...
</h3>
<LoginButtonSet></LoginButtonSet>
</div>
- <div className={classes.IllustrationContainer}></div>
? ------
+ <div className={classes.IllustrationContainer}>
+ <img src={Illustration} className={classes.Logo} />
+ </div>
</div>
);
};
export default LandingPage; | 5 | 0.238095 | 4 | 1 |
b4e3461277669bf42225d278d491b7c714968491 | vm_server/test/execute_macro/code/execute.py | vm_server/test/execute_macro/code/execute.py | import os
import shutil
import win32com.client
import pythoncom
import repackage
repackage.up()
def execute_macro():
"""Execute VBA macro in MS Excel
"""
pythoncom.CoInitialize()
current_path = os.path.dirname(os.getcwd())
path_to_file = current_path + "\\action\\data\\excelsheet.xlsm"
if os.path.exists(path_to_file):
xl_file = win32com.client.Dispatch("Excel.Application")
xl_run = xl_file.Workbooks.Open(os.path.abspath(path_to_file),
ReadOnly=1)
xl_run.Application.Run("excelsheet.xlsm!Module1.add_numbers_in_column") #execute macro
xl_run.Save()
xl_run.Close()
xl_file.Quit()
del xl_file
shutil.move(path_to_file, current_path +
"\\action\\output\\excelsheet.xlsm")
shutil.move(current_path + "\\action\\data\\output.txt", current_path +
"\\action\\output\\output.txt")
print("Action successfully executed")
if __name__ == "__main__":
execute_macro()
| import os
import shutil
import win32com.client
import pythoncom
def execute_macro():
"""Execute VBA macro in MS Excel
"""
pythoncom.CoInitialize()
current_path = os.path.dirname(os.getcwd())
path_to_file = ".\\data\\excelsheet.xlsm"
if os.path.exists(path_to_file):
xl_file = win32com.client.Dispatch("Excel.Application")
xl_run = xl_file.Workbooks.Open(os.path.abspath(path_to_file),
ReadOnly=1)
xl_run.Application.Run("excelsheet.xlsm!Module1.add_numbers_in_column") #execute macro
xl_run.Save()
xl_run.Close()
xl_file.Quit()
del xl_file
shutil.move(path_to_file, ".\\output\\excelsheet.xlsm")
shutil.move(".\\data\\output.txt", ".\\output\\output.txt")
print("Action successfully executed")
if __name__ == "__main__":
execute_macro()
| Modify excel screenshot test so that it works with the new directory structure | Modify excel screenshot test so that it works with the new directory structure
| Python | apache-2.0 | googleinterns/automated-windows-vms,googleinterns/automated-windows-vms | python | ## Code Before:
import os
import shutil
import win32com.client
import pythoncom
import repackage
repackage.up()
def execute_macro():
"""Execute VBA macro in MS Excel
"""
pythoncom.CoInitialize()
current_path = os.path.dirname(os.getcwd())
path_to_file = current_path + "\\action\\data\\excelsheet.xlsm"
if os.path.exists(path_to_file):
xl_file = win32com.client.Dispatch("Excel.Application")
xl_run = xl_file.Workbooks.Open(os.path.abspath(path_to_file),
ReadOnly=1)
xl_run.Application.Run("excelsheet.xlsm!Module1.add_numbers_in_column") #execute macro
xl_run.Save()
xl_run.Close()
xl_file.Quit()
del xl_file
shutil.move(path_to_file, current_path +
"\\action\\output\\excelsheet.xlsm")
shutil.move(current_path + "\\action\\data\\output.txt", current_path +
"\\action\\output\\output.txt")
print("Action successfully executed")
if __name__ == "__main__":
execute_macro()
## Instruction:
Modify excel screenshot test so that it works with the new directory structure
## Code After:
import os
import shutil
import win32com.client
import pythoncom
def execute_macro():
"""Execute VBA macro in MS Excel
"""
pythoncom.CoInitialize()
current_path = os.path.dirname(os.getcwd())
path_to_file = ".\\data\\excelsheet.xlsm"
if os.path.exists(path_to_file):
xl_file = win32com.client.Dispatch("Excel.Application")
xl_run = xl_file.Workbooks.Open(os.path.abspath(path_to_file),
ReadOnly=1)
xl_run.Application.Run("excelsheet.xlsm!Module1.add_numbers_in_column") #execute macro
xl_run.Save()
xl_run.Close()
xl_file.Quit()
del xl_file
shutil.move(path_to_file, ".\\output\\excelsheet.xlsm")
shutil.move(".\\data\\output.txt", ".\\output\\output.txt")
print("Action successfully executed")
if __name__ == "__main__":
execute_macro()
| import os
import shutil
import win32com.client
import pythoncom
- import repackage
- repackage.up()
def execute_macro():
"""Execute VBA macro in MS Excel
"""
pythoncom.CoInitialize()
current_path = os.path.dirname(os.getcwd())
- path_to_file = current_path + "\\action\\data\\excelsheet.xlsm"
? --------------- ^^^^^^^^
+ path_to_file = ".\\data\\excelsheet.xlsm"
? ^
if os.path.exists(path_to_file):
xl_file = win32com.client.Dispatch("Excel.Application")
xl_run = xl_file.Workbooks.Open(os.path.abspath(path_to_file),
ReadOnly=1)
xl_run.Application.Run("excelsheet.xlsm!Module1.add_numbers_in_column") #execute macro
xl_run.Save()
xl_run.Close()
xl_file.Quit()
del xl_file
+ shutil.move(path_to_file, ".\\output\\excelsheet.xlsm")
+ shutil.move(".\\data\\output.txt", ".\\output\\output.txt")
- shutil.move(path_to_file, current_path +
- "\\action\\output\\excelsheet.xlsm")
- shutil.move(current_path + "\\action\\data\\output.txt", current_path +
- "\\action\\output\\output.txt")
print("Action successfully executed")
if __name__ == "__main__":
execute_macro() | 10 | 0.322581 | 3 | 7 |
e67801344f5221f3cadb8cd14487cf3abe67587e | .travis.yml | .travis.yml | language: java
jdk:
- oraclejdk7
- openjdk7
notifications:
email:
recipients:
- betfair-cougar-dev@googlegroups.com
on_success: always
on_failure: always
| language: java
jdk:
- oraclejdk7
- openjdk7
notifications:
email:
recipients:
- betfair-cougar-dev@googlegroups.com
on_success: always
on_failure: always
before_script:
- "gem install travis-artifacts"
#after_script:
# - "travis-artifacts upload --target-path $TRAVIS_BUILD_ID/$TRAVIS_JOB_ID --path cougar-framework/cougar-util/target/surefire-reports:cougar-util-surefire-reports"
after_failure: # this will of course run only on failure
- "travis-artifacts upload --target-path $TRAVIS_BUILD_ID/$TRAVIS_JOB_ID --path cougar-framework/cougar-util/target/surefire-reports:cougar-util-surefire-reports"
#after_success: # and this only on success
# - "travis-artifacts upload --target-path $TRAVIS_BUILD_ID/$TRAVIS_JOB_ID --path build/build.tar.gz"
env:
global:
- "ARTIFACTS_AWS_REGION=eu-west-1"
- "ARTIFACTS_S3_BUCKET=betfair.travis-ci"
- secure: "EYtFWbIMgrz4zN2w3kYFuLhU+XmBtLb+kxqdYiqaivyibkXm94WGk7ZgmpjsY1evp1enVzZRriaG7lpuNd9ItSSxaVTHrx/StKWBCyGpg2/QsrEoC+w2wY7ylhGy0jYsPfG55R48z4ZPfv2sJgJr1+eE8u06w+k7BBRuv3/0Whk="
- secure: "kcWNRtMYCBIwkI3/LVnBzATFDBdU4u9H8ZfAV06l8GbhBBFJrQW5L5f0UZtR28gIAOzcKC6a/TbRFnfHsTRc7Ppfr+LMaDdTROy2M9Fjd6xAyTkuq/oGiU5G6av55TDpfBznukJsZw0QoHi3yqaTgNM8XvAQDJ8GUF0O72M8g7o="
| Add artifact upload for our current failing test | Add artifact upload for our current failing test | YAML | apache-2.0 | betfair/cougar,betfair/cougar,olupas/cougar,olupas/cougar,betfair/cougar,olupas/cougar,olupas/cougar,eswdd/disco,eswdd/disco,eswdd/disco,betfair/cougar | yaml | ## Code Before:
language: java
jdk:
- oraclejdk7
- openjdk7
notifications:
email:
recipients:
- betfair-cougar-dev@googlegroups.com
on_success: always
on_failure: always
## Instruction:
Add artifact upload for our current failing test
## Code After:
language: java
jdk:
- oraclejdk7
- openjdk7
notifications:
email:
recipients:
- betfair-cougar-dev@googlegroups.com
on_success: always
on_failure: always
before_script:
- "gem install travis-artifacts"
#after_script:
# - "travis-artifacts upload --target-path $TRAVIS_BUILD_ID/$TRAVIS_JOB_ID --path cougar-framework/cougar-util/target/surefire-reports:cougar-util-surefire-reports"
after_failure: # this will of course run only on failure
- "travis-artifacts upload --target-path $TRAVIS_BUILD_ID/$TRAVIS_JOB_ID --path cougar-framework/cougar-util/target/surefire-reports:cougar-util-surefire-reports"
#after_success: # and this only on success
# - "travis-artifacts upload --target-path $TRAVIS_BUILD_ID/$TRAVIS_JOB_ID --path build/build.tar.gz"
env:
global:
- "ARTIFACTS_AWS_REGION=eu-west-1"
- "ARTIFACTS_S3_BUCKET=betfair.travis-ci"
- secure: "EYtFWbIMgrz4zN2w3kYFuLhU+XmBtLb+kxqdYiqaivyibkXm94WGk7ZgmpjsY1evp1enVzZRriaG7lpuNd9ItSSxaVTHrx/StKWBCyGpg2/QsrEoC+w2wY7ylhGy0jYsPfG55R48z4ZPfv2sJgJr1+eE8u06w+k7BBRuv3/0Whk="
- secure: "kcWNRtMYCBIwkI3/LVnBzATFDBdU4u9H8ZfAV06l8GbhBBFJrQW5L5f0UZtR28gIAOzcKC6a/TbRFnfHsTRc7Ppfr+LMaDdTROy2M9Fjd6xAyTkuq/oGiU5G6av55TDpfBznukJsZw0QoHi3yqaTgNM8XvAQDJ8GUF0O72M8g7o="
| language: java
jdk:
- oraclejdk7
- openjdk7
notifications:
email:
recipients:
- betfair-cougar-dev@googlegroups.com
on_success: always
on_failure: always
+ before_script:
+ - "gem install travis-artifacts"
+ #after_script:
+ # - "travis-artifacts upload --target-path $TRAVIS_BUILD_ID/$TRAVIS_JOB_ID --path cougar-framework/cougar-util/target/surefire-reports:cougar-util-surefire-reports"
+ after_failure: # this will of course run only on failure
+ - "travis-artifacts upload --target-path $TRAVIS_BUILD_ID/$TRAVIS_JOB_ID --path cougar-framework/cougar-util/target/surefire-reports:cougar-util-surefire-reports"
+ #after_success: # and this only on success
+ # - "travis-artifacts upload --target-path $TRAVIS_BUILD_ID/$TRAVIS_JOB_ID --path build/build.tar.gz"
+ env:
+ global:
+ - "ARTIFACTS_AWS_REGION=eu-west-1"
+ - "ARTIFACTS_S3_BUCKET=betfair.travis-ci"
+ - secure: "EYtFWbIMgrz4zN2w3kYFuLhU+XmBtLb+kxqdYiqaivyibkXm94WGk7ZgmpjsY1evp1enVzZRriaG7lpuNd9ItSSxaVTHrx/StKWBCyGpg2/QsrEoC+w2wY7ylhGy0jYsPfG55R48z4ZPfv2sJgJr1+eE8u06w+k7BBRuv3/0Whk="
+ - secure: "kcWNRtMYCBIwkI3/LVnBzATFDBdU4u9H8ZfAV06l8GbhBBFJrQW5L5f0UZtR28gIAOzcKC6a/TbRFnfHsTRc7Ppfr+LMaDdTROy2M9Fjd6xAyTkuq/oGiU5G6av55TDpfBznukJsZw0QoHi3yqaTgNM8XvAQDJ8GUF0O72M8g7o=" | 14 | 1.4 | 14 | 0 |
218c218582fd3dc3082a390fa68c42a92006a868 | src/StreamFactoryInterface.php | src/StreamFactoryInterface.php | <?php
namespace Interop\Http\Factory;
use Psr\Http\Message\StreamInterface;
interface StreamFactoryInterface
{
/**
* Create a new stream from a resource.
*
* The resource MUST be readable and SHOULD be seekable. It MAY be writable.
*
* @param resource $resource
*
* @return StreamInterface
*/
public function createStream($resource);
}
| <?php
namespace Interop\Http\Factory;
use Psr\Http\Message\StreamInterface;
interface StreamFactoryInterface
{
/**
* Create a new stream from a resource or a string.
*
* If argument is resource, it MUST be readable and SHOULD be seekable. It MAY be writable.
*
* @param string|resource $resource
*
* @return StreamInterface
*/
public function createStream($resource);
}
| Allow for strings in Stream::createStream | Allow for strings in Stream::createStream
| PHP | mit | http-interop/http-factory | php | ## Code Before:
<?php
namespace Interop\Http\Factory;
use Psr\Http\Message\StreamInterface;
interface StreamFactoryInterface
{
/**
* Create a new stream from a resource.
*
* The resource MUST be readable and SHOULD be seekable. It MAY be writable.
*
* @param resource $resource
*
* @return StreamInterface
*/
public function createStream($resource);
}
## Instruction:
Allow for strings in Stream::createStream
## Code After:
<?php
namespace Interop\Http\Factory;
use Psr\Http\Message\StreamInterface;
interface StreamFactoryInterface
{
/**
* Create a new stream from a resource or a string.
*
* If argument is resource, it MUST be readable and SHOULD be seekable. It MAY be writable.
*
* @param string|resource $resource
*
* @return StreamInterface
*/
public function createStream($resource);
}
| <?php
namespace Interop\Http\Factory;
use Psr\Http\Message\StreamInterface;
interface StreamFactoryInterface
{
/**
- * Create a new stream from a resource.
+ * Create a new stream from a resource or a string.
? ++++++++++++
*
- * The resource MUST be readable and SHOULD be seekable. It MAY be writable.
? ^^
+ * If argument is resource, it MUST be readable and SHOULD be seekable. It MAY be writable.
? ^^^^^^^^ +++++ ++++
*
- * @param resource $resource
+ * @param string|resource $resource
? +++++++
*
* @return StreamInterface
*/
public function createStream($resource);
} | 6 | 0.315789 | 3 | 3 |
f238e362bdbd1429a489273bd46545a4f1282425 | workspace/files/etc/php5/mods-available/c9.ini | workspace/files/etc/php5/mods-available/c9.ini | ; configuration defaults for Cloud9 IDE workspaces
; priority=80
date.timezone = Europe/Amsterdam | ; configuration defaults for Cloud9 IDE workspaces
; priority=80
date.timezone = UTC
display_errors = On
display_startup_errors = On
track_errors = On
error_reporting = E_ALL & ~E_NOTICE & ~E_STRICT & ~E_DEPRECATED
max_input_time = 300
max_execution_time = 300
| Enable PHP error reporting by default | Enable PHP error reporting by default
| INI | mit | hpssjellis/templates,hpssjellis/templates,hpssjellis/templates,hpssjellis/templates,c9/templates,c9/templates,c9/templates,c9/templates,c9/templates,c9/templates,c9/templates,hpssjellis/templates,hpssjellis/templates,hpssjellis/templates | ini | ## Code Before:
; configuration defaults for Cloud9 IDE workspaces
; priority=80
date.timezone = Europe/Amsterdam
## Instruction:
Enable PHP error reporting by default
## Code After:
; configuration defaults for Cloud9 IDE workspaces
; priority=80
date.timezone = UTC
display_errors = On
display_startup_errors = On
track_errors = On
error_reporting = E_ALL & ~E_NOTICE & ~E_STRICT & ~E_DEPRECATED
max_input_time = 300
max_execution_time = 300
| ; configuration defaults for Cloud9 IDE workspaces
; priority=80
- date.timezone = Europe/Amsterdam
+ date.timezone = UTC
+
+ display_errors = On
+ display_startup_errors = On
+ track_errors = On
+
+ error_reporting = E_ALL & ~E_NOTICE & ~E_STRICT & ~E_DEPRECATED
+
+ max_input_time = 300
+ max_execution_time = 300 | 11 | 2.75 | 10 | 1 |
f387343038b10aec84c22f3809773a30630c12e8 | README.md | README.md | GCM Library for Go
--
Provides the following functionality for Google Cloud Messaging:
1. Sending messages.
2. Listening to receiving messages.
Documentation: http://godoc.org/github.com/google/go-gcm
## Installation
$ go get github.com/google/go-gcm
## Status
This library is in Alpha. We will make an effort to support the library, but we reserve the right to make incompatible changes when necessary.
## Feedback
Please read CONTRIBUTING and raise issues here in Github.
| 
This project is no longer actively maintained, and remains here as an archive of this work.
For a replacement, check out [this actively maintained fork](https://github.com/kikinteractive/go-gcm) of the library.
GCM Library for Go
--
Provides the following functionality for Google Cloud Messaging:
1. Sending messages.
2. Listening to receiving messages.
Documentation: http://godoc.org/github.com/google/go-gcm
## Installation
$ go get github.com/google/go-gcm
## Status
This library is in Alpha. We will make an effort to support the library, but we reserve the right to make incompatible changes when necessary.
## Feedback
Please read CONTRIBUTING and raise issues here in Github.
| Update project status to inactive. | Update project status to inactive. | Markdown | apache-2.0 | googlearchive/go-gcm | markdown | ## Code Before:
GCM Library for Go
--
Provides the following functionality for Google Cloud Messaging:
1. Sending messages.
2. Listening to receiving messages.
Documentation: http://godoc.org/github.com/google/go-gcm
## Installation
$ go get github.com/google/go-gcm
## Status
This library is in Alpha. We will make an effort to support the library, but we reserve the right to make incompatible changes when necessary.
## Feedback
Please read CONTRIBUTING and raise issues here in Github.
## Instruction:
Update project status to inactive.
## Code After:

This project is no longer actively maintained, and remains here as an archive of this work.
For a replacement, check out [this actively maintained fork](https://github.com/kikinteractive/go-gcm) of the library.
GCM Library for Go
--
Provides the following functionality for Google Cloud Messaging:
1. Sending messages.
2. Listening to receiving messages.
Documentation: http://godoc.org/github.com/google/go-gcm
## Installation
$ go get github.com/google/go-gcm
## Status
This library is in Alpha. We will make an effort to support the library, but we reserve the right to make incompatible changes when necessary.
## Feedback
Please read CONTRIBUTING and raise issues here in Github.
| + 
+
+ This project is no longer actively maintained, and remains here as an archive of this work.
+
+ For a replacement, check out [this actively maintained fork](https://github.com/kikinteractive/go-gcm) of the library.
+
GCM Library for Go
--
Provides the following functionality for Google Cloud Messaging:
1. Sending messages.
2. Listening to receiving messages.
Documentation: http://godoc.org/github.com/google/go-gcm
## Installation
$ go get github.com/google/go-gcm
## Status
This library is in Alpha. We will make an effort to support the library, but we reserve the right to make incompatible changes when necessary.
## Feedback
Please read CONTRIBUTING and raise issues here in Github. | 6 | 0.285714 | 6 | 0 |
1e36b9a0ebbd584573a9fcb5f2a5c05288be0146 | src/blue_salamander/r3elements.cljs | src/blue_salamander/r3elements.cljs | (ns blue-salamander.r3elements
(:require-macros [blue-salamander.r3elements :as abbrev])
(:require [goog.events :as events]
[clojure.string :as string]))
(defn element-args [opts children]
(cond
(nil? opts) [nil children]
(map? opts) [(clj->js opts) children]
(object? opts) [opts children]))
(abbrev/defn-r3-element Renderer)
(abbrev/defn-r3-element Scene)
(abbrev/defn-r3-element Mesh)
(abbrev/defn-r3-element Object3D)
(abbrev/defn-r3-element PerspectiveCamera)
| (ns blue-salamander.r3elements
(:require-macros [blue-salamander.r3elements :as abbrev])
(:require [goog.events :as events]
[clojure.string :as string]))
;;
;; provides convenient definitions for threejs nodes. Instead of going:
;;
;; (js/React.createElement (js/ReactTHREE.Scene #js {} ....))
;;
;; you go:
;;
;; < require this ns as 'r3' >
;;
;; (r3/scene {} ...)
;;
(defn element-args [opts children]
(cond
(nil? opts) [nil children]
(map? opts) [(clj->js opts) children]
(object? opts) [opts children]))
(abbrev/defn-r3-element Renderer)
(abbrev/defn-r3-element Scene)
(abbrev/defn-r3-element Mesh)
(abbrev/defn-r3-element Object3D)
(abbrev/defn-r3-element PerspectiveCamera)
| Comment in element convenience funcs | Comment in element convenience funcs
| Clojure | mit | Izzimach/blue-salamander | clojure | ## Code Before:
(ns blue-salamander.r3elements
(:require-macros [blue-salamander.r3elements :as abbrev])
(:require [goog.events :as events]
[clojure.string :as string]))
(defn element-args [opts children]
(cond
(nil? opts) [nil children]
(map? opts) [(clj->js opts) children]
(object? opts) [opts children]))
(abbrev/defn-r3-element Renderer)
(abbrev/defn-r3-element Scene)
(abbrev/defn-r3-element Mesh)
(abbrev/defn-r3-element Object3D)
(abbrev/defn-r3-element PerspectiveCamera)
## Instruction:
Comment in element convenience funcs
## Code After:
(ns blue-salamander.r3elements
(:require-macros [blue-salamander.r3elements :as abbrev])
(:require [goog.events :as events]
[clojure.string :as string]))
;;
;; provides convenient definitions for threejs nodes. Instead of going:
;;
;; (js/React.createElement (js/ReactTHREE.Scene #js {} ....))
;;
;; you go:
;;
;; < require this ns as 'r3' >
;;
;; (r3/scene {} ...)
;;
(defn element-args [opts children]
(cond
(nil? opts) [nil children]
(map? opts) [(clj->js opts) children]
(object? opts) [opts children]))
(abbrev/defn-r3-element Renderer)
(abbrev/defn-r3-element Scene)
(abbrev/defn-r3-element Mesh)
(abbrev/defn-r3-element Object3D)
(abbrev/defn-r3-element PerspectiveCamera)
| (ns blue-salamander.r3elements
(:require-macros [blue-salamander.r3elements :as abbrev])
(:require [goog.events :as events]
[clojure.string :as string]))
+
+ ;;
+ ;; provides convenient definitions for threejs nodes. Instead of going:
+ ;;
+ ;; (js/React.createElement (js/ReactTHREE.Scene #js {} ....))
+ ;;
+ ;; you go:
+ ;;
+ ;; < require this ns as 'r3' >
+ ;;
+ ;; (r3/scene {} ...)
+ ;;
+
(defn element-args [opts children]
(cond
(nil? opts) [nil children]
(map? opts) [(clj->js opts) children]
(object? opts) [opts children]))
(abbrev/defn-r3-element Renderer)
(abbrev/defn-r3-element Scene)
(abbrev/defn-r3-element Mesh)
(abbrev/defn-r3-element Object3D)
(abbrev/defn-r3-element PerspectiveCamera) | 13 | 0.8125 | 13 | 0 |
a385decbdb81dacc73b38e09b7e5b9c32c93d7e2 | .travis.yml | .travis.yml | language: ruby
rvm:
- "2.1.10"
- "2.2.5"
- "2.3.1"
env:
- "RAILS_VERSION=3.2.0"
- "RAILS_VERSION=4.0.0"
- "RAILS_VERSION=4.1.0"
- "RAILS_VERSION=4.2.0"
- "RAILS_VERSION=5.0.0"
install:
- bundle update --without development
script: TRAVIS=true bundle exec rake spec
| language: ruby
rvm:
- "2.2.5"
- "2.3.1"
env:
- "RAILS_VERSION=3.2.0"
- "RAILS_VERSION=4.0.0"
- "RAILS_VERSION=4.1.0"
- "RAILS_VERSION=4.2.0"
- "RAILS_VERSION=5.0.0"
install:
- bundle update
script: TRAVIS=true bundle exec rake spec
| Drop test for Ruby 2.1.0 | Drop test for Ruby 2.1.0
| YAML | mit | magnusvk/counter_culture,magnusvk/counter_culture | yaml | ## Code Before:
language: ruby
rvm:
- "2.1.10"
- "2.2.5"
- "2.3.1"
env:
- "RAILS_VERSION=3.2.0"
- "RAILS_VERSION=4.0.0"
- "RAILS_VERSION=4.1.0"
- "RAILS_VERSION=4.2.0"
- "RAILS_VERSION=5.0.0"
install:
- bundle update --without development
script: TRAVIS=true bundle exec rake spec
## Instruction:
Drop test for Ruby 2.1.0
## Code After:
language: ruby
rvm:
- "2.2.5"
- "2.3.1"
env:
- "RAILS_VERSION=3.2.0"
- "RAILS_VERSION=4.0.0"
- "RAILS_VERSION=4.1.0"
- "RAILS_VERSION=4.2.0"
- "RAILS_VERSION=5.0.0"
install:
- bundle update
script: TRAVIS=true bundle exec rake spec
| language: ruby
rvm:
- - "2.1.10"
- "2.2.5"
- "2.3.1"
env:
- "RAILS_VERSION=3.2.0"
- "RAILS_VERSION=4.0.0"
- "RAILS_VERSION=4.1.0"
- "RAILS_VERSION=4.2.0"
- "RAILS_VERSION=5.0.0"
install:
- - bundle update --without development
+ - bundle update
script: TRAVIS=true bundle exec rake spec | 3 | 0.214286 | 1 | 2 |
2f3d0fd3362828f14587569ab9394c90bc9845fb | README.md | README.md |
A simple terminal utility that allows sending key presses to a remote [Kodi](http://kodi.tv/) media center. For example, you could run kodikeys on your laptop as a simple remote control keyboard.
## Features
* Send key presses to Kodi's [EventServer](http://kodi.wiki/view/EventServer)
* Search and input request prompts using [JSON-RPC](http://kodi.wiki/view/JSON-RPC_API) api
## Installation
`npm install -g kodikeys`
## Usage
Note: In order to connect remotely to your Kodi host, first go to `Settings > Services > Remote Control` in your Kodi installation and make sure `Allow remote control by programs on other systems` is enabled.
To run:
```
kodikeys kodi-host
```
Replace `kodi-host` with the host name or IP address of your Kodi box.
To view options:
```
kodikeys -h
```
## Known Issues
Keyboard combinations using `ctrl` key do not work, as Kodi's EventServer does not seem to recognize them.
When running `npm install` you may get compile errors due to old dependencies for the `kodi-ws` package. You should still be able to run kodikeys without these dependencies installed.
## Resources
[Kodi keyboard reference](http://kodi.wiki/view/Keyboard_controls)
[Kodi EventServer](http://kodi.wiki/view/EventServer)
[Kodi JSON-RPC API](http://kodi.wiki/view/JSON-RPC_API)
|
A simple terminal utility that allows sending key presses to a remote [Kodi](http://kodi.tv/) media center. For example, you could run kodikeys on your laptop as a simple remote control keyboard.
## Features
* Send key presses to Kodi's [EventServer](http://kodi.wiki/view/EventServer)
* Search and input request prompts using [JSON-RPC](http://kodi.wiki/view/JSON-RPC_API) api
## Installation
`npm install -g kodikeys`
## Usage
Note: In order to connect remotely to your Kodi host, first go to `Settings > Services > Remote Control` in your Kodi installation and make sure `Allow remote control by programs on other systems` is enabled.
To run:
```
kodikeys kodi-host
```
Replace `kodi-host` with the host name or IP address of your Kodi box.
To view options:
```
kodikeys -h
```
## Known Issues
Keyboard combinations using `ctrl` key do not work, as Kodi's EventServer does not seem to recognize them.
Using shrinkwrapped dependencies to pull in later ws version than kodi-ws uses. Old versoins of ws package do not install properly on newer nodejs versions.
## Resources
[Kodi keyboard reference](http://kodi.wiki/view/Keyboard_controls)
[Kodi EventServer](http://kodi.wiki/view/EventServer)
[Kodi JSON-RPC API](http://kodi.wiki/view/JSON-RPC_API)
| Change known issues in readme | Change known issues in readme
| Markdown | mit | bartels/kodikeys | markdown | ## Code Before:
A simple terminal utility that allows sending key presses to a remote [Kodi](http://kodi.tv/) media center. For example, you could run kodikeys on your laptop as a simple remote control keyboard.
## Features
* Send key presses to Kodi's [EventServer](http://kodi.wiki/view/EventServer)
* Search and input request prompts using [JSON-RPC](http://kodi.wiki/view/JSON-RPC_API) api
## Installation
`npm install -g kodikeys`
## Usage
Note: In order to connect remotely to your Kodi host, first go to `Settings > Services > Remote Control` in your Kodi installation and make sure `Allow remote control by programs on other systems` is enabled.
To run:
```
kodikeys kodi-host
```
Replace `kodi-host` with the host name or IP address of your Kodi box.
To view options:
```
kodikeys -h
```
## Known Issues
Keyboard combinations using `ctrl` key do not work, as Kodi's EventServer does not seem to recognize them.
When running `npm install` you may get compile errors due to old dependencies for the `kodi-ws` package. You should still be able to run kodikeys without these dependencies installed.
## Resources
[Kodi keyboard reference](http://kodi.wiki/view/Keyboard_controls)
[Kodi EventServer](http://kodi.wiki/view/EventServer)
[Kodi JSON-RPC API](http://kodi.wiki/view/JSON-RPC_API)
## Instruction:
Change known issues in readme
## Code After:
A simple terminal utility that allows sending key presses to a remote [Kodi](http://kodi.tv/) media center. For example, you could run kodikeys on your laptop as a simple remote control keyboard.
## Features
* Send key presses to Kodi's [EventServer](http://kodi.wiki/view/EventServer)
* Search and input request prompts using [JSON-RPC](http://kodi.wiki/view/JSON-RPC_API) api
## Installation
`npm install -g kodikeys`
## Usage
Note: In order to connect remotely to your Kodi host, first go to `Settings > Services > Remote Control` in your Kodi installation and make sure `Allow remote control by programs on other systems` is enabled.
To run:
```
kodikeys kodi-host
```
Replace `kodi-host` with the host name or IP address of your Kodi box.
To view options:
```
kodikeys -h
```
## Known Issues
Keyboard combinations using `ctrl` key do not work, as Kodi's EventServer does not seem to recognize them.
Using shrinkwrapped dependencies to pull in later ws version than kodi-ws uses. Old versoins of ws package do not install properly on newer nodejs versions.
## Resources
[Kodi keyboard reference](http://kodi.wiki/view/Keyboard_controls)
[Kodi EventServer](http://kodi.wiki/view/EventServer)
[Kodi JSON-RPC API](http://kodi.wiki/view/JSON-RPC_API)
|
A simple terminal utility that allows sending key presses to a remote [Kodi](http://kodi.tv/) media center. For example, you could run kodikeys on your laptop as a simple remote control keyboard.
## Features
* Send key presses to Kodi's [EventServer](http://kodi.wiki/view/EventServer)
* Search and input request prompts using [JSON-RPC](http://kodi.wiki/view/JSON-RPC_API) api
## Installation
`npm install -g kodikeys`
## Usage
Note: In order to connect remotely to your Kodi host, first go to `Settings > Services > Remote Control` in your Kodi installation and make sure `Allow remote control by programs on other systems` is enabled.
To run:
```
kodikeys kodi-host
```
Replace `kodi-host` with the host name or IP address of your Kodi box.
To view options:
```
kodikeys -h
```
## Known Issues
Keyboard combinations using `ctrl` key do not work, as Kodi's EventServer does not seem to recognize them.
- When running `npm install` you may get compile errors due to old dependencies for the `kodi-ws` package. You should still be able to run kodikeys without these dependencies installed.
+ Using shrinkwrapped dependencies to pull in later ws version than kodi-ws uses. Old versoins of ws package do not install properly on newer nodejs versions.
## Resources
[Kodi keyboard reference](http://kodi.wiki/view/Keyboard_controls)
[Kodi EventServer](http://kodi.wiki/view/EventServer)
[Kodi JSON-RPC API](http://kodi.wiki/view/JSON-RPC_API) | 2 | 0.052632 | 1 | 1 |
cf5faa04971dc31e288726270a01e1556c1935a0 | README.md | README.md |
This project rocks and uses MIT-LICENSE.
## Usage
honoka-rails is easy to drop into Rails with the asset pipeline.
```rb
gem 'sass-rails', '>= 3.2'
gem 'honoka-rails'
```
bundle install and restart your server to make the files available through the pipeline.
Import Bootstrap(Honoka) styles in app/assets/stylesheets/application.scss:
```
$ mv app/assets/stylesheets/application.css app/assets/stylesheets/application.scss
```
"bootstrap-sprockets" must be imported before "honoka" and "honoka/variables"
```scss
@import "bootstrap-sprockets";
@import "honoka";
```
Then, remove all the
``*= require_self`` and ``*= require_tree .`` statements from the sass file. Instead,use @import to import Sass files.
Do not use ``*= require`` in Sass or your other stylesheets will not be able to access the Bootstrap mixins or variables.
Require Bootstrap Javascripts in app/assets/javascripts/application.js:
```js
//= require jquery
//= require bootstrap-sprockets
```
bootstrap-sprockets and bootstrap should not both be included in application.js.
## References
* [bootstrap-sass](https://github.com/twbs/bootstrap-sass)
* [Honoka](http://honokak.osaka/)
|
This project rocks and uses MIT-LICENSE.
## Usage
honoka-rails is easy to drop into Rails with the asset pipeline.
```rb
gem 'sass-rails', '>= 3.2'
gem 'honoka-rails'
```
bundle install and restart your server to make the files available through the pipeline.
Import Bootstrap(Honoka) styles in app/assets/stylesheets/application.scss:
```
$ mv app/assets/stylesheets/application.css app/assets/stylesheets/application.scss
```
"bootstrap-sprockets" must be imported before "honoka" and "honoka/variables"
```scss
@import "bootstrap-sprockets";
@import "honoka";
```
Then, remove all the
``*= require_self`` and ``*= require_tree .`` statements from the sass file. Instead,use @import to import Sass files.
Do not use ``*= require`` in Sass or your other stylesheets will not be able to access the Bootstrap mixins or variables.
Require Bootstrap Javascripts in app/assets/javascripts/application.js:
```js
//= require jquery
//= require bootstrap-sprockets
```
bootstrap-sprockets and bootstrap should not both be included in application.js.
## References
* [bootstrap-sass](https://github.com/twbs/bootstrap-sass)
* [Honoka](https://github.com/windyakin/Honoka)
| Fix honoka url in readme | Fix honoka url in readme
| Markdown | mit | iguchi1124/honoka-rails,iguchi1124/honoka-rails | markdown | ## Code Before:
This project rocks and uses MIT-LICENSE.
## Usage
honoka-rails is easy to drop into Rails with the asset pipeline.
```rb
gem 'sass-rails', '>= 3.2'
gem 'honoka-rails'
```
bundle install and restart your server to make the files available through the pipeline.
Import Bootstrap(Honoka) styles in app/assets/stylesheets/application.scss:
```
$ mv app/assets/stylesheets/application.css app/assets/stylesheets/application.scss
```
"bootstrap-sprockets" must be imported before "honoka" and "honoka/variables"
```scss
@import "bootstrap-sprockets";
@import "honoka";
```
Then, remove all the
``*= require_self`` and ``*= require_tree .`` statements from the sass file. Instead,use @import to import Sass files.
Do not use ``*= require`` in Sass or your other stylesheets will not be able to access the Bootstrap mixins or variables.
Require Bootstrap Javascripts in app/assets/javascripts/application.js:
```js
//= require jquery
//= require bootstrap-sprockets
```
bootstrap-sprockets and bootstrap should not both be included in application.js.
## References
* [bootstrap-sass](https://github.com/twbs/bootstrap-sass)
* [Honoka](http://honokak.osaka/)
## Instruction:
Fix honoka url in readme
## Code After:
This project rocks and uses MIT-LICENSE.
## Usage
honoka-rails is easy to drop into Rails with the asset pipeline.
```rb
gem 'sass-rails', '>= 3.2'
gem 'honoka-rails'
```
bundle install and restart your server to make the files available through the pipeline.
Import Bootstrap(Honoka) styles in app/assets/stylesheets/application.scss:
```
$ mv app/assets/stylesheets/application.css app/assets/stylesheets/application.scss
```
"bootstrap-sprockets" must be imported before "honoka" and "honoka/variables"
```scss
@import "bootstrap-sprockets";
@import "honoka";
```
Then, remove all the
``*= require_self`` and ``*= require_tree .`` statements from the sass file. Instead,use @import to import Sass files.
Do not use ``*= require`` in Sass or your other stylesheets will not be able to access the Bootstrap mixins or variables.
Require Bootstrap Javascripts in app/assets/javascripts/application.js:
```js
//= require jquery
//= require bootstrap-sprockets
```
bootstrap-sprockets and bootstrap should not both be included in application.js.
## References
* [bootstrap-sass](https://github.com/twbs/bootstrap-sass)
* [Honoka](https://github.com/windyakin/Honoka)
|
This project rocks and uses MIT-LICENSE.
## Usage
honoka-rails is easy to drop into Rails with the asset pipeline.
```rb
gem 'sass-rails', '>= 3.2'
gem 'honoka-rails'
```
bundle install and restart your server to make the files available through the pipeline.
Import Bootstrap(Honoka) styles in app/assets/stylesheets/application.scss:
```
$ mv app/assets/stylesheets/application.css app/assets/stylesheets/application.scss
```
"bootstrap-sprockets" must be imported before "honoka" and "honoka/variables"
```scss
@import "bootstrap-sprockets";
@import "honoka";
```
Then, remove all the
``*= require_self`` and ``*= require_tree .`` statements from the sass file. Instead,use @import to import Sass files.
Do not use ``*= require`` in Sass or your other stylesheets will not be able to access the Bootstrap mixins or variables.
Require Bootstrap Javascripts in app/assets/javascripts/application.js:
```js
//= require jquery
//= require bootstrap-sprockets
```
bootstrap-sprockets and bootstrap should not both be included in application.js.
## References
* [bootstrap-sass](https://github.com/twbs/bootstrap-sass)
- * [Honoka](http://honokak.osaka/)
+ * [Honoka](https://github.com/windyakin/Honoka) | 2 | 0.042553 | 1 | 1 |
c87c4a972f0f2d4966142fa666a900112762ed76 | scipy/constants/tests/test_codata.py | scipy/constants/tests/test_codata.py |
import warnings
from scipy.constants import find
from numpy.testing import assert_equal
def test_find():
warnings.simplefilter('ignore', DeprecationWarning)
keys = find('weak mixing', disp=False)
assert_equal(keys, ['weak mixing angle'])
keys = find('qwertyuiop', disp=False)
assert_equal(keys, [])
keys = find('natural unit', disp=False)
assert_equal(keys, sorted(['natural unit of velocity',
'natural unit of action',
'natural unit of action in eV s',
'natural unit of mass',
'natural unit of energy',
'natural unit of energy in MeV',
'natural unit of momentum',
'natural unit of momentum in MeV/c',
'natural unit of length',
'natural unit of time']))
|
import warnings
from scipy.constants import find
from numpy.testing import assert_equal, run_module_suite
def test_find():
warnings.simplefilter('ignore', DeprecationWarning)
keys = find('weak mixing', disp=False)
assert_equal(keys, ['weak mixing angle'])
keys = find('qwertyuiop', disp=False)
assert_equal(keys, [])
keys = find('natural unit', disp=False)
assert_equal(keys, sorted(['natural unit of velocity',
'natural unit of action',
'natural unit of action in eV s',
'natural unit of mass',
'natural unit of energy',
'natural unit of energy in MeV',
'natural unit of momentum',
'natural unit of momentum in MeV/c',
'natural unit of length',
'natural unit of time']))
if __name__ == "__main__":
run_module_suite()
| Allow codata tests to be run as script. | ENH: Allow codata tests to be run as script.
| Python | bsd-3-clause | zerothi/scipy,zxsted/scipy,josephcslater/scipy,rgommers/scipy,grlee77/scipy,sargas/scipy,dch312/scipy,ilayn/scipy,apbard/scipy,jakevdp/scipy,niknow/scipy,vanpact/scipy,jakevdp/scipy,rmcgibbo/scipy,zxsted/scipy,pnedunuri/scipy,raoulbq/scipy,lhilt/scipy,mgaitan/scipy,mingwpy/scipy,maciejkula/scipy,njwilson23/scipy,Dapid/scipy,woodscn/scipy,perimosocordiae/scipy,aarchiba/scipy,pbrod/scipy,aarchiba/scipy,trankmichael/scipy,Srisai85/scipy,aarchiba/scipy,pschella/scipy,tylerjereddy/scipy,minhlongdo/scipy,mortonjt/scipy,dominicelse/scipy,jsilter/scipy,sargas/scipy,vanpact/scipy,anielsen001/scipy,mingwpy/scipy,matthew-brett/scipy,aeklant/scipy,mikebenfield/scipy,Srisai85/scipy,sauliusl/scipy,FRidh/scipy,andim/scipy,ortylp/scipy,josephcslater/scipy,mhogg/scipy,WarrenWeckesser/scipy,ndchorley/scipy,josephcslater/scipy,mortonjt/scipy,felipebetancur/scipy,jonycgn/scipy,mtrbean/scipy,juliantaylor/scipy,mdhaber/scipy,mdhaber/scipy,kleskjr/scipy,ogrisel/scipy,teoliphant/scipy,petebachant/scipy,lukauskas/scipy,Gillu13/scipy,sonnyhu/scipy,WillieMaddox/scipy,arokem/scipy,jonycgn/scipy,gdooper/scipy,ChanderG/scipy,pbrod/scipy,surhudm/scipy,rgommers/scipy,anntzer/scipy,gfyoung/scipy,sriki18/scipy,nmayorov/scipy,ogrisel/scipy,nvoron23/scipy,giorgiop/scipy,trankmichael/scipy,scipy/scipy,dominicelse/scipy,petebachant/scipy,ChanderG/scipy,ndchorley/scipy,scipy/scipy,Stefan-Endres/scipy,futurulus/scipy,giorgiop/scipy,behzadnouri/scipy,sargas/scipy,haudren/scipy,Gillu13/scipy,teoliphant/scipy,WillieMaddox/scipy,cpaulik/scipy,juliantaylor/scipy,sriki18/scipy,ortylp/scipy,zerothi/scipy,gdooper/scipy,befelix/scipy,raoulbq/scipy,sargas/scipy,newemailjdm/scipy,piyush0609/scipy,vhaasteren/scipy,sriki18/scipy,sauliusl/scipy,rgommers/scipy,zaxliu/scipy,Stefan-Endres/scipy,dch312/scipy,scipy/scipy,aeklant/scipy,cpaulik/scipy,maniteja123/scipy,zerothi/scipy,cpaulik/scipy,Eric89GXL/scipy,mgaitan/scipy,bkendzior/scipy,jonycgn/scipy,juliantaylor/scipy,felipebetancur/scipy,giorgi
op/scipy,gertingold/scipy,befelix/scipy,maciejkula/scipy,vberaudi/scipy,lhilt/scipy,ChanderG/scipy,mgaitan/scipy,hainm/scipy,endolith/scipy,lukauskas/scipy,gdooper/scipy,kalvdans/scipy,richardotis/scipy,jor-/scipy,mgaitan/scipy,mtrbean/scipy,Gillu13/scipy,raoulbq/scipy,lukauskas/scipy,tylerjereddy/scipy,person142/scipy,mortada/scipy,vhaasteren/scipy,Shaswat27/scipy,vigna/scipy,pschella/scipy,surhudm/scipy,kalvdans/scipy,witcxc/scipy,piyush0609/scipy,woodscn/scipy,matthew-brett/scipy,nvoron23/scipy,matthewalbani/scipy,mdhaber/scipy,gfyoung/scipy,pnedunuri/scipy,vanpact/scipy,ilayn/scipy,FRidh/scipy,mingwpy/scipy,efiring/scipy,jakevdp/scipy,Shaswat27/scipy,matthewalbani/scipy,pizzathief/scipy,mdhaber/scipy,jonycgn/scipy,pyramania/scipy,apbard/scipy,Dapid/scipy,rmcgibbo/scipy,vhaasteren/scipy,niknow/scipy,apbard/scipy,sonnyhu/scipy,larsmans/scipy,bkendzior/scipy,perimosocordiae/scipy,maniteja123/scipy,mortada/scipy,felipebetancur/scipy,andyfaff/scipy,mikebenfield/scipy,jseabold/scipy,mtrbean/scipy,jamestwebber/scipy,anielsen001/scipy,ortylp/scipy,Gillu13/scipy,woodscn/scipy,witcxc/scipy,haudren/scipy,witcxc/scipy,andyfaff/scipy,newemailjdm/scipy,vhaasteren/scipy,njwilson23/scipy,grlee77/scipy,surhudm/scipy,zaxliu/scipy,ortylp/scipy,njwilson23/scipy,dch312/scipy,fredrikw/scipy,pnedunuri/scipy,mortada/scipy,mortada/scipy,fredrikw/scipy,gef756/scipy,zaxliu/scipy,chatcannon/scipy,matthew-brett/scipy,jsilter/scipy,ilayn/scipy,mortada/scipy,Stefan-Endres/scipy,jseabold/scipy,minhlongdo/scipy,anntzer/scipy,andyfaff/scipy,futurulus/scipy,sauliusl/scipy,e-q/scipy,mingwpy/scipy,rmcgibbo/scipy,Dapid/scipy,vigna/scipy,minhlongdo/scipy,mdhaber/scipy,raoulbq/scipy,pyramania/scipy,richardotis/scipy,jor-/scipy,befelix/scipy,sriki18/scipy,mortonjt/scipy,endolith/scipy,haudren/scipy,juliantaylor/scipy,hainm/scipy,e-q/scipy,pizzathief/scipy,endolith/scipy,FRidh/scipy,argriffing/scipy,perimosocordiae/scipy,tylerjereddy/scipy,fernand/scipy,tylerjereddy/scipy,argriffing/scipy,vigna/scipy,mh
ogg/scipy,vanpact/scipy,woodscn/scipy,Eric89GXL/scipy,vberaudi/scipy,ChanderG/scipy,raoulbq/scipy,gef756/scipy,sauliusl/scipy,anntzer/scipy,jseabold/scipy,pbrod/scipy,argriffing/scipy,nvoron23/scipy,ales-erjavec/scipy,ortylp/scipy,Newman101/scipy,jjhelmus/scipy,petebachant/scipy,mingwpy/scipy,chatcannon/scipy,Eric89GXL/scipy,Shaswat27/scipy,ogrisel/scipy,anntzer/scipy,andyfaff/scipy,aman-iitj/scipy,cpaulik/scipy,sauliusl/scipy,zerothi/scipy,hainm/scipy,Srisai85/scipy,scipy/scipy,mtrbean/scipy,fredrikw/scipy,endolith/scipy,Dapid/scipy,ndchorley/scipy,giorgiop/scipy,felipebetancur/scipy,Gillu13/scipy,maciejkula/scipy,jonycgn/scipy,vanpact/scipy,fernand/scipy,argriffing/scipy,aarchiba/scipy,jjhelmus/scipy,newemailjdm/scipy,matthewalbani/scipy,behzadnouri/scipy,jamestwebber/scipy,behzadnouri/scipy,andim/scipy,pschella/scipy,nvoron23/scipy,ndchorley/scipy,Shaswat27/scipy,argriffing/scipy,andyfaff/scipy,juliantaylor/scipy,felipebetancur/scipy,mtrbean/scipy,fredrikw/scipy,sonnyhu/scipy,efiring/scipy,e-q/scipy,jsilter/scipy,mtrbean/scipy,behzadnouri/scipy,grlee77/scipy,ChanderG/scipy,maciejkula/scipy,ogrisel/scipy,maciejkula/scipy,Srisai85/scipy,jamestwebber/scipy,woodscn/scipy,pschella/scipy,vigna/scipy,dch312/scipy,lhilt/scipy,haudren/scipy,nmayorov/scipy,futurulus/scipy,Newman101/scipy,larsmans/scipy,Eric89GXL/scipy,larsmans/scipy,maniteja123/scipy,maniteja123/scipy,gfyoung/scipy,fernand/scipy,sargas/scipy,nmayorov/scipy,lhilt/scipy,richardotis/scipy,vanpact/scipy,teoliphant/scipy,arokem/scipy,aeklant/scipy,pyramania/scipy,jamestwebber/scipy,vberaudi/scipy,dominicelse/scipy,efiring/scipy,rmcgibbo/scipy,ales-erjavec/scipy,Eric89GXL/scipy,niknow/scipy,zaxliu/scipy,surhudm/scipy,maniteja123/scipy,mikebenfield/scipy,gef756/scipy,vberaudi/scipy,efiring/scipy,sriki18/scipy,FRidh/scipy,gdooper/scipy,futurulus/scipy,cpaulik/scipy,WarrenWeckesser/scipy,larsmans/scipy,person142/scipy,teoliphant/scipy,kleskjr/scipy,befelix/scipy,arokem/scipy,Kamp9/scipy,kleskjr/scipy,teoliphant/sci
py,kleskjr/scipy,dominicelse/scipy,ndchorley/scipy,anntzer/scipy,jjhelmus/scipy,pbrod/scipy,ilayn/scipy,Shaswat27/scipy,efiring/scipy,njwilson23/scipy,niknow/scipy,dominicelse/scipy,behzadnouri/scipy,mingwpy/scipy,rgommers/scipy,FRidh/scipy,person142/scipy,mortonjt/scipy,vhaasteren/scipy,pbrod/scipy,nvoron23/scipy,jor-/scipy,niknow/scipy,trankmichael/scipy,fernand/scipy,haudren/scipy,njwilson23/scipy,matthew-brett/scipy,nvoron23/scipy,endolith/scipy,e-q/scipy,jonycgn/scipy,dch312/scipy,WillieMaddox/scipy,rmcgibbo/scipy,lukauskas/scipy,andim/scipy,jakevdp/scipy,zerothi/scipy,witcxc/scipy,ales-erjavec/scipy,ales-erjavec/scipy,jor-/scipy,grlee77/scipy,nonhermitian/scipy,aman-iitj/scipy,vhaasteren/scipy,chatcannon/scipy,arokem/scipy,giorgiop/scipy,futurulus/scipy,e-q/scipy,ChanderG/scipy,ilayn/scipy,WarrenWeckesser/scipy,mikebenfield/scipy,Newman101/scipy,richardotis/scipy,jsilter/scipy,perimosocordiae/scipy,gfyoung/scipy,scipy/scipy,kalvdans/scipy,sonnyhu/scipy,lukauskas/scipy,WillieMaddox/scipy,aman-iitj/scipy,Newman101/scipy,zerothi/scipy,apbard/scipy,jsilter/scipy,hainm/scipy,Stefan-Endres/scipy,maniteja123/scipy,petebachant/scipy,efiring/scipy,mhogg/scipy,Dapid/scipy,nmayorov/scipy,anielsen001/scipy,Dapid/scipy,aeklant/scipy,nonhermitian/scipy,perimosocordiae/scipy,gertingold/scipy,chatcannon/scipy,zxsted/scipy,trankmichael/scipy,behzadnouri/scipy,WillieMaddox/scipy,newemailjdm/scipy,bkendzior/scipy,vberaudi/scipy,anielsen001/scipy,argriffing/scipy,minhlongdo/scipy,aman-iitj/scipy,futurulus/scipy,pizzathief/scipy,aeklant/scipy,anielsen001/scipy,cpaulik/scipy,njwilson23/scipy,zaxliu/scipy,surhudm/scipy,Kamp9/scipy,Eric89GXL/scipy,mhogg/scipy,scipy/scipy,gfyoung/scipy,befelix/scipy,Newman101/scipy,ales-erjavec/scipy,raoulbq/scipy,zaxliu/scipy,pyramania/scipy,larsmans/scipy,mortada/scipy,chatcannon/scipy,grlee77/scipy,person142/scipy,Kamp9/scipy,giorgiop/scipy,kalvdans/scipy,Stefan-Endres/scipy,mhogg/scipy,mortonjt/scipy,Kamp9/scipy,zxsted/scipy,hainm/scipy,lhilt/scip
y,haudren/scipy,sauliusl/scipy,aman-iitj/scipy,aarchiba/scipy,kleskjr/scipy,andyfaff/scipy,gef756/scipy,WarrenWeckesser/scipy,fredrikw/scipy,gertingold/scipy,petebachant/scipy,petebachant/scipy,endolith/scipy,Newman101/scipy,sonnyhu/scipy,trankmichael/scipy,josephcslater/scipy,vigna/scipy,kleskjr/scipy,matthewalbani/scipy,ortylp/scipy,Gillu13/scipy,gdooper/scipy,Kamp9/scipy,larsmans/scipy,mikebenfield/scipy,matthewalbani/scipy,Shaswat27/scipy,lukauskas/scipy,pizzathief/scipy,josephcslater/scipy,pnedunuri/scipy,matthew-brett/scipy,chatcannon/scipy,bkendzior/scipy,ilayn/scipy,jjhelmus/scipy,perimosocordiae/scipy,apbard/scipy,ogrisel/scipy,anntzer/scipy,rmcgibbo/scipy,surhudm/scipy,mdhaber/scipy,bkendzior/scipy,jseabold/scipy,sriki18/scipy,arokem/scipy,kalvdans/scipy,Kamp9/scipy,Srisai85/scipy,pnedunuri/scipy,pyramania/scipy,andim/scipy,fernand/scipy,gef756/scipy,trankmichael/scipy,nonhermitian/scipy,pnedunuri/scipy,niknow/scipy,piyush0609/scipy,richardotis/scipy,jamestwebber/scipy,Stefan-Endres/scipy,woodscn/scipy,minhlongdo/scipy,fredrikw/scipy,newemailjdm/scipy,WarrenWeckesser/scipy,ales-erjavec/scipy,andim/scipy,richardotis/scipy,piyush0609/scipy,andim/scipy,jor-/scipy,FRidh/scipy,nonhermitian/scipy,tylerjereddy/scipy,jseabold/scipy,gertingold/scipy,pizzathief/scipy,jseabold/scipy,zxsted/scipy,rgommers/scipy,gertingold/scipy,WarrenWeckesser/scipy,jakevdp/scipy,mortonjt/scipy,hainm/scipy,jjhelmus/scipy,piyush0609/scipy,WillieMaddox/scipy,ndchorley/scipy,zxsted/scipy,mgaitan/scipy,person142/scipy,minhlongdo/scipy,fernand/scipy,gef756/scipy,pbrod/scipy,pschella/scipy,anielsen001/scipy,vberaudi/scipy,aman-iitj/scipy,mhogg/scipy,witcxc/scipy,sonnyhu/scipy,piyush0609/scipy,mgaitan/scipy,nmayorov/scipy,newemailjdm/scipy,felipebetancur/scipy,Srisai85/scipy,nonhermitian/scipy | python | ## Code Before:
import warnings
from scipy.constants import find
from numpy.testing import assert_equal
def test_find():
warnings.simplefilter('ignore', DeprecationWarning)
keys = find('weak mixing', disp=False)
assert_equal(keys, ['weak mixing angle'])
keys = find('qwertyuiop', disp=False)
assert_equal(keys, [])
keys = find('natural unit', disp=False)
assert_equal(keys, sorted(['natural unit of velocity',
'natural unit of action',
'natural unit of action in eV s',
'natural unit of mass',
'natural unit of energy',
'natural unit of energy in MeV',
'natural unit of momentum',
'natural unit of momentum in MeV/c',
'natural unit of length',
'natural unit of time']))
## Instruction:
ENH: Allow codata tests to be run as script.
## Code After:
import warnings
from scipy.constants import find
from numpy.testing import assert_equal, run_module_suite
def test_find():
warnings.simplefilter('ignore', DeprecationWarning)
keys = find('weak mixing', disp=False)
assert_equal(keys, ['weak mixing angle'])
keys = find('qwertyuiop', disp=False)
assert_equal(keys, [])
keys = find('natural unit', disp=False)
assert_equal(keys, sorted(['natural unit of velocity',
'natural unit of action',
'natural unit of action in eV s',
'natural unit of mass',
'natural unit of energy',
'natural unit of energy in MeV',
'natural unit of momentum',
'natural unit of momentum in MeV/c',
'natural unit of length',
'natural unit of time']))
if __name__ == "__main__":
run_module_suite()
|
import warnings
from scipy.constants import find
- from numpy.testing import assert_equal
+ from numpy.testing import assert_equal, run_module_suite
? ++++++++++++++++++
def test_find():
warnings.simplefilter('ignore', DeprecationWarning)
keys = find('weak mixing', disp=False)
assert_equal(keys, ['weak mixing angle'])
keys = find('qwertyuiop', disp=False)
assert_equal(keys, [])
keys = find('natural unit', disp=False)
assert_equal(keys, sorted(['natural unit of velocity',
'natural unit of action',
'natural unit of action in eV s',
'natural unit of mass',
'natural unit of energy',
'natural unit of energy in MeV',
'natural unit of momentum',
'natural unit of momentum in MeV/c',
'natural unit of length',
'natural unit of time']))
+
+ if __name__ == "__main__":
+ run_module_suite() | 5 | 0.185185 | 4 | 1 |
a05fedf44417f6d9da607957cc2be8535df7aa41 | core/src/main/scala/experimentation/Experimental.scala | core/src/main/scala/experimentation/Experimental.scala | package funnel
package experimentation
import scala.collection.concurrent.TrieMap
/** A wrapper for instrument constructors to support experimentation and A/B testing */
case class Experimental[I,K](underlying: (Key[K] => Key[K]) => I) {
val memo = new TrieMap[(ExperimentID, GroupID), I]
def apply(token: Map[ExperimentID, GroupID])(f: I => Unit): Unit =
token.foreach {
case (e, g) => f(memo.getOrElseUpdate(
(e, g), underlying(_.setAttribute(AttributeKeys.experimentID, e)
.setAttribute(AttributeKeys.experimentGroup, g))))
}
}
| package funnel
package experimentation
import scala.collection.concurrent.TrieMap
/** A wrapper for instrument constructors to support experimentation and A/B testing */
case class Experimental[I,K](underlying: (Key[K] => Key[K]) => I) {
val memo = new TrieMap[(ExperimentID, GroupID), I]
val unadorned = underlying(identity)
def apply(token: Map[ExperimentID, GroupID])(f: I => Unit): Unit =
if (token.isEmpty)
f(unadorned)
else token.foreach {
case (e, g) => f(memo.getOrElseUpdate(
(e, g), underlying(_.setAttribute(AttributeKeys.experimentID, e)
.setAttribute(AttributeKeys.experimentGroup, g))))
}
}
| Handle the case when there are no experiments | Handle the case when there are no experiments
| Scala | apache-2.0 | oncue/funnel,neigor/funnel,neigor/funnel,oncue/funnel | scala | ## Code Before:
package funnel
package experimentation
import scala.collection.concurrent.TrieMap
/** A wrapper for instrument constructors to support experimentation and A/B testing */
case class Experimental[I,K](underlying: (Key[K] => Key[K]) => I) {
val memo = new TrieMap[(ExperimentID, GroupID), I]
def apply(token: Map[ExperimentID, GroupID])(f: I => Unit): Unit =
token.foreach {
case (e, g) => f(memo.getOrElseUpdate(
(e, g), underlying(_.setAttribute(AttributeKeys.experimentID, e)
.setAttribute(AttributeKeys.experimentGroup, g))))
}
}
## Instruction:
Handle the case when there are no experiments
## Code After:
package funnel
package experimentation
import scala.collection.concurrent.TrieMap
/** A wrapper for instrument constructors to support experimentation and A/B testing */
case class Experimental[I,K](underlying: (Key[K] => Key[K]) => I) {
val memo = new TrieMap[(ExperimentID, GroupID), I]
val unadorned = underlying(identity)
def apply(token: Map[ExperimentID, GroupID])(f: I => Unit): Unit =
if (token.isEmpty)
f(unadorned)
else token.foreach {
case (e, g) => f(memo.getOrElseUpdate(
(e, g), underlying(_.setAttribute(AttributeKeys.experimentID, e)
.setAttribute(AttributeKeys.experimentGroup, g))))
}
}
| package funnel
package experimentation
import scala.collection.concurrent.TrieMap
/** A wrapper for instrument constructors to support experimentation and A/B testing */
case class Experimental[I,K](underlying: (Key[K] => Key[K]) => I) {
val memo = new TrieMap[(ExperimentID, GroupID), I]
+ val unadorned = underlying(identity)
def apply(token: Map[ExperimentID, GroupID])(f: I => Unit): Unit =
+ if (token.isEmpty)
+ f(unadorned)
- token.foreach {
+ else token.foreach {
? +++++
case (e, g) => f(memo.getOrElseUpdate(
(e, g), underlying(_.setAttribute(AttributeKeys.experimentID, e)
.setAttribute(AttributeKeys.experimentGroup, g))))
}
}
| 5 | 0.294118 | 4 | 1 |
df2301e1c0d4403743918ad0e4fe7afab95efb4e | ghost/admin/app/controllers/application.js | ghost/admin/app/controllers/application.js | /* eslint-disable ghost/ember/alias-model-in-controller */
import Controller from '@ember/controller';
import {inject as service} from '@ember/service';
export default class ApplicationController extends Controller {
@service billing;
@service customViews;
@service config;
@service dropdown;
@service router;
@service session;
@service settings;
@service ui;
get showBilling() {
return this.config.get('hostSettings.billing.enabled');
}
get showNavMenu() {
// if we're in fullscreen mode don't show the nav menu
if (this.ui.isFullScreen) {
return false;
}
// we need to defer showing the navigation menu until the session.user
// promise has fulfilled so that gh-user-can-admin has the correct data
if (!this.session.isAuthenticated || !this.session.user.isFulfilled) {
return false;
}
return (this.router.currentRouteName !== 'error404' || this.session.isAuthenticated)
&& !this.router.currentRouteName.match(/(signin|signup|setup|reset)/);
}
}
| /* eslint-disable ghost/ember/alias-model-in-controller */
import Controller from '@ember/controller';
import {computed} from '@ember/object';
import {inject as service} from '@ember/service';
export default Controller.extend({
billing: service(),
customViews: service(),
config: service(),
dropdown: service(),
router: service(),
session: service(),
settings: service(),
ui: service(),
showBilling: computed.reads('config.hostSettings.billing.enabled'),
showNavMenu: computed('router.currentRouteName', 'session.{isAuthenticated,user.isFulfilled}', 'ui.isFullScreen', function () {
let {router, session, ui} = this;
// if we're in fullscreen mode don't show the nav menu
if (ui.isFullScreen) {
return false;
}
// we need to defer showing the navigation menu until the session.user
// promise has fulfilled so that gh-user-can-admin has the correct data
if (!session.isAuthenticated || !session.user.isFulfilled) {
return false;
}
return (router.currentRouteName !== 'error404' || session.isAuthenticated)
&& !router.currentRouteName.match(/(signin|signup|setup|reset)/);
})
});
| Revert "Refactored ApplicationController to use native class" | Revert "Refactored ApplicationController to use native class"
This reverts commit 9b6d4822e72425ceec192723f4d469060afe1ea5.
- there is an issue with properties not being tracked correctly and the menu not being shown when returning from the editor
- reverting to working version with computed properties for now
| JavaScript | mit | TryGhost/Ghost,TryGhost/Ghost,TryGhost/Ghost | javascript | ## Code Before:
/* eslint-disable ghost/ember/alias-model-in-controller */
import Controller from '@ember/controller';
import {inject as service} from '@ember/service';
export default class ApplicationController extends Controller {
@service billing;
@service customViews;
@service config;
@service dropdown;
@service router;
@service session;
@service settings;
@service ui;
get showBilling() {
return this.config.get('hostSettings.billing.enabled');
}
get showNavMenu() {
// if we're in fullscreen mode don't show the nav menu
if (this.ui.isFullScreen) {
return false;
}
// we need to defer showing the navigation menu until the session.user
// promise has fulfilled so that gh-user-can-admin has the correct data
if (!this.session.isAuthenticated || !this.session.user.isFulfilled) {
return false;
}
return (this.router.currentRouteName !== 'error404' || this.session.isAuthenticated)
&& !this.router.currentRouteName.match(/(signin|signup|setup|reset)/);
}
}
## Instruction:
Revert "Refactored ApplicationController to use native class"
This reverts commit 9b6d4822e72425ceec192723f4d469060afe1ea5.
- there is an issue with properties not being tracked correctly and the menu not being shown when returning from the editor
- reverting to working version with computed properties for now
## Code After:
/* eslint-disable ghost/ember/alias-model-in-controller */
import Controller from '@ember/controller';
import {computed} from '@ember/object';
import {inject as service} from '@ember/service';
export default Controller.extend({
billing: service(),
customViews: service(),
config: service(),
dropdown: service(),
router: service(),
session: service(),
settings: service(),
ui: service(),
showBilling: computed.reads('config.hostSettings.billing.enabled'),
showNavMenu: computed('router.currentRouteName', 'session.{isAuthenticated,user.isFulfilled}', 'ui.isFullScreen', function () {
let {router, session, ui} = this;
// if we're in fullscreen mode don't show the nav menu
if (ui.isFullScreen) {
return false;
}
// we need to defer showing the navigation menu until the session.user
// promise has fulfilled so that gh-user-can-admin has the correct data
if (!session.isAuthenticated || !session.user.isFulfilled) {
return false;
}
return (router.currentRouteName !== 'error404' || session.isAuthenticated)
&& !router.currentRouteName.match(/(signin|signup|setup|reset)/);
})
});
| /* eslint-disable ghost/ember/alias-model-in-controller */
import Controller from '@ember/controller';
+ import {computed} from '@ember/object';
import {inject as service} from '@ember/service';
- export default class ApplicationController extends Controller {
- @service billing;
- @service customViews;
- @service config;
- @service dropdown;
- @service router;
- @service session;
- @service settings;
- @service ui;
+ export default Controller.extend({
+ billing: service(),
+ customViews: service(),
+ config: service(),
+ dropdown: service(),
+ router: service(),
+ session: service(),
+ settings: service(),
+ ui: service(),
- get showBilling() {
- return this.config.get('hostSettings.billing.enabled');
- }
+ showBilling: computed.reads('config.hostSettings.billing.enabled'),
+ showNavMenu: computed('router.currentRouteName', 'session.{isAuthenticated,user.isFulfilled}', 'ui.isFullScreen', function () {
+ let {router, session, ui} = this;
- get showNavMenu() {
// if we're in fullscreen mode don't show the nav menu
- if (this.ui.isFullScreen) {
? -----
+ if (ui.isFullScreen) {
return false;
}
// we need to defer showing the navigation menu until the session.user
// promise has fulfilled so that gh-user-can-admin has the correct data
- if (!this.session.isAuthenticated || !this.session.user.isFulfilled) {
? ----- -----
+ if (!session.isAuthenticated || !session.user.isFulfilled) {
return false;
}
- return (this.router.currentRouteName !== 'error404' || this.session.isAuthenticated)
? ----- -----
+ return (router.currentRouteName !== 'error404' || session.isAuthenticated)
- && !this.router.currentRouteName.match(/(signin|signup|setup|reset)/);
? -----
+ && !router.currentRouteName.match(/(signin|signup|setup|reset)/);
- }
+ })
? +
- }
+ }); | 38 | 1.117647 | 19 | 19 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.