commit stringlengths 40 40 | old_file stringlengths 4 184 | new_file stringlengths 4 184 | old_contents stringlengths 1 3.6k | new_contents stringlengths 5 3.38k | subject stringlengths 15 778 | message stringlengths 16 6.74k | lang stringclasses 201 values | license stringclasses 13 values | repos stringlengths 6 116k | config stringclasses 201 values | content stringlengths 137 7.24k | diff stringlengths 26 5.55k | diff_length int64 1 123 | relative_diff_length float64 0.01 89 | n_lines_added int64 0 108 | n_lines_deleted int64 0 106 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
937b539dbfccc1f42e3ed03a370b572f0cbad87d | app/services/config_data_sources/project_data_source.rb | app/services/config_data_sources/project_data_source.rb | module FastlaneCI
# Abstract base class for all config data sources
class ProjectDataSource
def projects
not_implemented(__method__)
end
def git_repos
not_implemented(__method__)
end
def refresh_repo
not_implemented(__method__)
end
def save_git_repo_configs!(git_repo_configs: nil)
not_implemented(__method__)
end
def create_project!(
name: nil,
repo_config: nil,
enabled: nil,
platform: nil,
lane: nil,
artifact_provider: nil,
job_triggers: nil
)
not_implemented(__method__)
end
def update_project!(project: nil)
not_implemented(__method__)
end
def delete_project!(project: nil)
not_implemented(__method__)
end
end
end
| module FastlaneCI
# Abstract base class for all config data sources
class ProjectDataSource
def projects
not_implemented(__method__)
end
def refresh_repo
not_implemented(__method__)
end
def create_project!(
name: nil,
repo_config: nil,
enabled: nil,
platform: nil,
lane: nil,
artifact_provider: nil,
job_triggers: nil
)
not_implemented(__method__)
end
def update_project!(project: nil)
not_implemented(__method__)
end
def delete_project!(project: nil)
not_implemented(__method__)
end
end
end
| Remove unused method from interface also | Remove unused method from interface also | Ruby | mit | fastlane/ci,fastlane/ci,fastlane/ci,fastlane/ci,fastlane/ci | ruby | ## Code Before:
module FastlaneCI
# Abstract base class for all config data sources
class ProjectDataSource
def projects
not_implemented(__method__)
end
def git_repos
not_implemented(__method__)
end
def refresh_repo
not_implemented(__method__)
end
def save_git_repo_configs!(git_repo_configs: nil)
not_implemented(__method__)
end
def create_project!(
name: nil,
repo_config: nil,
enabled: nil,
platform: nil,
lane: nil,
artifact_provider: nil,
job_triggers: nil
)
not_implemented(__method__)
end
def update_project!(project: nil)
not_implemented(__method__)
end
def delete_project!(project: nil)
not_implemented(__method__)
end
end
end
## Instruction:
Remove unused method from interface also
## Code After:
module FastlaneCI
# Abstract base class for all config data sources
class ProjectDataSource
def projects
not_implemented(__method__)
end
def refresh_repo
not_implemented(__method__)
end
def create_project!(
name: nil,
repo_config: nil,
enabled: nil,
platform: nil,
lane: nil,
artifact_provider: nil,
job_triggers: nil
)
not_implemented(__method__)
end
def update_project!(project: nil)
not_implemented(__method__)
end
def delete_project!(project: nil)
not_implemented(__method__)
end
end
end
| module FastlaneCI
# Abstract base class for all config data sources
class ProjectDataSource
def projects
not_implemented(__method__)
end
- def git_repos
- not_implemented(__method__)
- end
-
def refresh_repo
- not_implemented(__method__)
- end
-
- def save_git_repo_configs!(git_repo_configs: nil)
not_implemented(__method__)
end
def create_project!(
name: nil,
repo_config: nil,
enabled: nil,
platform: nil,
lane: nil,
artifact_provider: nil,
job_triggers: nil
)
not_implemented(__method__)
end
def update_project!(project: nil)
not_implemented(__method__)
end
def delete_project!(project: nil)
not_implemented(__method__)
end
end
end | 8 | 0.2 | 0 | 8 |
fa1941983cf078837e3d053a9fe08b6c39d76153 | README.md | README.md | A command line tool to search google
[](https://badge.fury.io/js/node-cli-google)
[](https://nodei.co/npm/node-cli-google/)
[](https://github.com/feross/standard)
####Install
* Firstly, get [Node.js and npm](https://docs.npmjs.com/getting-started/installing-node).
* Then, install the package,
<pre lang="javascript">
<code>
npm install node-cli-google -g
</code>
</pre>
* At last, you can write down on the console what you expect to find:
<pre lang="javascript">
<code>
search node js development
</code>
</pre>
After you choose the links after search, when you enter, you can get the links opened in the browser immediately.
[logo]: http://i64.tinypic.com/30icens.png"

| A command line tool to search google
[](https://badge.fury.io/js/node-cli-google)
[](https://nodei.co/npm/node-cli-google/)
[](https://github.com/feross/standard)
####Install
* Firstly, get [Node.js and npm](https://docs.npmjs.com/getting-started/installing-node).
* Then, install the package,
<pre lang="javascript">
<code>
npm install node-cli-google -g
</code>
</pre>
* At last, you can write down on the console what you expect to find:
<pre lang="javascript">
<code>
search node js development
</code>
</pre>
After you choose the links after search, when you enter, you can get the links opened in the browser immediately.
[logo]: http://i64.tinypic.com/30icens.png"

| Update JS Standard Style Badge to SVG | Update JS Standard Style Badge to SVG | Markdown | mit | hwclass/node-cli-google | markdown | ## Code Before:
A command line tool to search google
[](https://badge.fury.io/js/node-cli-google)
[](https://nodei.co/npm/node-cli-google/)
[](https://github.com/feross/standard)
####Install
* Firstly, get [Node.js and npm](https://docs.npmjs.com/getting-started/installing-node).
* Then, install the package,
<pre lang="javascript">
<code>
npm install node-cli-google -g
</code>
</pre>
* At last, you can write down on the console what you expect to find:
<pre lang="javascript">
<code>
search node js development
</code>
</pre>
After you choose the links after search, when you enter, you can get the links opened in the browser immediately.
[logo]: http://i64.tinypic.com/30icens.png"

## Instruction:
Update JS Standard Style Badge to SVG
## Code After:
A command line tool to search google
[](https://badge.fury.io/js/node-cli-google)
[](https://nodei.co/npm/node-cli-google/)
[](https://github.com/feross/standard)
####Install
* Firstly, get [Node.js and npm](https://docs.npmjs.com/getting-started/installing-node).
* Then, install the package,
<pre lang="javascript">
<code>
npm install node-cli-google -g
</code>
</pre>
* At last, you can write down on the console what you expect to find:
<pre lang="javascript">
<code>
search node js development
</code>
</pre>
After you choose the links after search, when you enter, you can get the links opened in the browser immediately.
[logo]: http://i64.tinypic.com/30icens.png"

| A command line tool to search google
[](https://badge.fury.io/js/node-cli-google)
[](https://nodei.co/npm/node-cli-google/)
- [](https://github.com/feross/standard)
? - -------------- ^^
+ [](https://github.com/feross/standard)
? ++++ ^^
####Install
* Firstly, get [Node.js and npm](https://docs.npmjs.com/getting-started/installing-node).
* Then, install the package,
<pre lang="javascript">
<code>
npm install node-cli-google -g
</code>
</pre>
* At last, you can write down on the console what you expect to find:
<pre lang="javascript">
<code>
search node js development
</code>
</pre>
After you choose the links after search, when you enter, you can get the links opened in the browser immediately.
[logo]: http://i64.tinypic.com/30icens.png"
 | 2 | 0.066667 | 1 | 1 |
b5d324e7b793c3375f335c081c9974904fd1c592 | app/assets/javascripts/d3/d3_chart_serie_select.js | app/assets/javascripts/d3/d3_chart_serie_select.js | var D3ChartSerieSelect = (function () {
function setChartSerie(){
this.updateChart();
}
function createOptions(){
var html_options = [];
this.options.forEach(function(option) {
html_options += '<option value=' + option + '>' +
I18n.t("output_element_series." + option) +
'</option>';
});
return html_options;
}
function buildSelectBox(){
return $("<select/>")
.addClass("d3-chart-date-select")
.append(createOptions.call(this))
.on('change', setChartSerie.bind(this));
}
D3ChartSerieSelect.prototype = {
selectBox: undefined,
draw: function (updateChart) {
this.updateChart = updateChart;
this.selectBox = buildSelectBox.call(this);
this.scope.append(this.selectBox);
}
};
function D3ChartSerieSelect(scope, options) {
this.scope = $(scope);
this.options = options;
}
return D3ChartSerieSelect;
}());
| var D3ChartSerieSelect = (function () {
function createOptions(options){
return options.map(function(option) {
return '<option value=' + option + '>' +
I18n.t("output_element_series." + option) +
'</option>';
});
}
function buildSelectBox(options, updateChart){
return $("<select/>")
.addClass("d3-chart-date-select")
.append(createOptions(options))
.on('change', updateChart);
}
D3ChartSerieSelect.prototype = {
selectBox: undefined,
draw: function (updateChart) {
this.selectBox = buildSelectBox(this.options, updateChart);
this.scope.append(this.selectBox);
}
};
function D3ChartSerieSelect(scope, options) {
this.scope = $(scope);
this.options = options;
}
return D3ChartSerieSelect;
}());
| Remove magic this from serie select | Remove magic this from serie select
| JavaScript | mit | quintel/etmodel,quintel/etmodel,quintel/etmodel,quintel/etmodel | javascript | ## Code Before:
var D3ChartSerieSelect = (function () {
function setChartSerie(){
this.updateChart();
}
function createOptions(){
var html_options = [];
this.options.forEach(function(option) {
html_options += '<option value=' + option + '>' +
I18n.t("output_element_series." + option) +
'</option>';
});
return html_options;
}
function buildSelectBox(){
return $("<select/>")
.addClass("d3-chart-date-select")
.append(createOptions.call(this))
.on('change', setChartSerie.bind(this));
}
D3ChartSerieSelect.prototype = {
selectBox: undefined,
draw: function (updateChart) {
this.updateChart = updateChart;
this.selectBox = buildSelectBox.call(this);
this.scope.append(this.selectBox);
}
};
function D3ChartSerieSelect(scope, options) {
this.scope = $(scope);
this.options = options;
}
return D3ChartSerieSelect;
}());
## Instruction:
Remove magic this from serie select
## Code After:
var D3ChartSerieSelect = (function () {
function createOptions(options){
return options.map(function(option) {
return '<option value=' + option + '>' +
I18n.t("output_element_series." + option) +
'</option>';
});
}
function buildSelectBox(options, updateChart){
return $("<select/>")
.addClass("d3-chart-date-select")
.append(createOptions(options))
.on('change', updateChart);
}
D3ChartSerieSelect.prototype = {
selectBox: undefined,
draw: function (updateChart) {
this.selectBox = buildSelectBox(this.options, updateChart);
this.scope.append(this.selectBox);
}
};
function D3ChartSerieSelect(scope, options) {
this.scope = $(scope);
this.options = options;
}
return D3ChartSerieSelect;
}());
| var D3ChartSerieSelect = (function () {
- function setChartSerie(){
- this.updateChart();
+ function createOptions(options){
+ return options.map(function(option) {
+ return '<option value=' + option + '>' +
+ I18n.t("output_element_series." + option) +
+ '</option>';
+ });
}
+ function buildSelectBox(options, updateChart){
- function createOptions(){
- var html_options = [];
- this.options.forEach(function(option) {
- html_options += '<option value=' + option + '>' +
- I18n.t("output_element_series." + option) +
- '</option>';
- });
- return html_options;
- }
-
- function buildSelectBox(){
return $("<select/>")
.addClass("d3-chart-date-select")
- .append(createOptions.call(this))
? ----- -
+ .append(createOptions(options))
? ++ ++
- .on('change', setChartSerie.bind(this));
+ .on('change', updateChart);
}
D3ChartSerieSelect.prototype = {
selectBox: undefined,
draw: function (updateChart) {
- this.updateChart = updateChart;
- this.selectBox = buildSelectBox.call(this);
? -----
+ this.selectBox = buildSelectBox(this.options, updateChart);
? +++++++++++++++++++++
-
this.scope.append(this.selectBox);
}
};
function D3ChartSerieSelect(scope, options) {
this.scope = $(scope);
this.options = options;
}
return D3ChartSerieSelect;
}()); | 28 | 0.7 | 10 | 18 |
2b697b6061564b9c8c0bea1c491f657766952b6f | ci/bamboo/build-linux.sh | ci/bamboo/build-linux.sh | set -o errexit
set -o xtrace
# Prepare environment
if [ -z "${USER}" ]; then
USER="docker"
fi
export USER
# Setup compiler
if [ -z "${CC}" ]; then
CC="gcc"
fi
export CC
if [ "${CC}" = "clang" ]; then
if [ -z "${CXX}" ]; then
CXX="clang++"
fi
COMPILER_PACKAGES="clang-3.4" # Ubuntu-specific apt package name
else
if [ -z "${CXX}" ]; then
CXX="g++"
fi
COMPILER_PACKAGES="${CC} ${CXX}" # Ubuntu-specific apt package names
fi
export CXX
# Install OS dependencies, assuming stock ubuntu:latest
apt-get update
apt-get install -y \
wget \
git \
${COMPILER_PACKAGES} \
build-essential \
python \
python2.7 \
python2.7-dev
wget https://bootstrap.pypa.io/get-pip.py -O - | python
pip install --upgrade --ignore-installed setuptools
pip install wheel
# Move into root of nupic repository
pushd `git rev-parse --show-toplevel`
# Build installable python packages
python setup.py bdist_wheel
# Install nupic wheel and dependencies, including nupic.bindings artifact in
# wheelwhouse/
pip install -f wheelhouse/ dist/nupic-`cat VERSION`*.whl
# Invoke tests
python setup.py test
# Return to original path
popd
| set -o errexit
set -o xtrace
# Prepare environment
if [ -z "${USER}" ]; then
USER="docker"
fi
export USER
# Setup compiler
if [ -z "${CC}" ]; then
CC="gcc"
fi
export CC
if [ "${CC}" = "clang" ]; then
if [ -z "${CXX}" ]; then
CXX="clang++"
fi
COMPILER_PACKAGES="clang-3.4" # Ubuntu-specific apt package name
else
if [ -z "${CXX}" ]; then
CXX="g++"
fi
COMPILER_PACKAGES="${CC} ${CXX}" # Ubuntu-specific apt package names
fi
export CXX
# Install OS dependencies, assuming stock ubuntu:latest
apt-get update
apt-get install -y \
wget \
git \
${COMPILER_PACKAGES} \
build-essential \
python \
python2.7 \
python2.7-dev \
python-pip
pip install --upgrade --ignore-installed setuptools
pip install wheel
# Move into root of nupic repository
pushd `git rev-parse --show-toplevel`
# Build installable python packages
python setup.py bdist_wheel
# Install nupic wheel and dependencies, including nupic.bindings artifact in
# wheelwhouse/
pip install -f wheelhouse/ dist/nupic-`cat VERSION`*.whl
# Invoke tests
python setup.py test
# Return to original path
popd
| Fix pip install that fails to download. | Fix pip install that fails to download.
| Shell | agpl-3.0 | badlogicmanpreet/nupic,rcrowder/nupic,rhyolight/nupic,lscheinkman/nupic,numenta-ci/nupic,neuroidss/nupic,neuroidss/nupic,blueburningcoder/nupic,pulinagrawal/nupic,numenta/nupic,subutai/nupic,cogmission/nupic,marionleborgne/nupic,lscheinkman/nupic,ywcui1990/nupic,marionleborgne/nupic,badlogicmanpreet/nupic,blueburningcoder/nupic,rcrowder/nupic,EricSB/nupic,neuroidss/nupic,vitaly-krugl/nupic,pulinagrawal/nupic,numenta-ci/nupic,vitaly-krugl/nupic,cogmission/nupic,numenta-ci/nupic,arhik/nupic,marionleborgne/nupic,subutai/nupic,cogmission/nupic,alfonsokim/nupic,scottpurdy/nupic,arhik/nupic,numenta/nupic,rcrowder/nupic,ywcui1990/nupic,pulinagrawal/nupic,scottpurdy/nupic,lscheinkman/nupic,badlogicmanpreet/nupic,alfonsokim/nupic,arhik/nupic,rhyolight/nupic,rhyolight/nupic,EricSB/nupic,scottpurdy/nupic,blueburningcoder/nupic,subutai/nupic,alfonsokim/nupic,EricSB/nupic,vitaly-krugl/nupic,numenta/nupic,ywcui1990/nupic | shell | ## Code Before:
set -o errexit
set -o xtrace
# Prepare environment
if [ -z "${USER}" ]; then
USER="docker"
fi
export USER
# Setup compiler
if [ -z "${CC}" ]; then
CC="gcc"
fi
export CC
if [ "${CC}" = "clang" ]; then
if [ -z "${CXX}" ]; then
CXX="clang++"
fi
COMPILER_PACKAGES="clang-3.4" # Ubuntu-specific apt package name
else
if [ -z "${CXX}" ]; then
CXX="g++"
fi
COMPILER_PACKAGES="${CC} ${CXX}" # Ubuntu-specific apt package names
fi
export CXX
# Install OS dependencies, assuming stock ubuntu:latest
apt-get update
apt-get install -y \
wget \
git \
${COMPILER_PACKAGES} \
build-essential \
python \
python2.7 \
python2.7-dev
wget https://bootstrap.pypa.io/get-pip.py -O - | python
pip install --upgrade --ignore-installed setuptools
pip install wheel
# Move into root of nupic repository
pushd `git rev-parse --show-toplevel`
# Build installable python packages
python setup.py bdist_wheel
# Install nupic wheel and dependencies, including nupic.bindings artifact in
# wheelwhouse/
pip install -f wheelhouse/ dist/nupic-`cat VERSION`*.whl
# Invoke tests
python setup.py test
# Return to original path
popd
## Instruction:
Fix pip install that fails to download.
## Code After:
set -o errexit
set -o xtrace
# Prepare environment
if [ -z "${USER}" ]; then
USER="docker"
fi
export USER
# Setup compiler
if [ -z "${CC}" ]; then
CC="gcc"
fi
export CC
if [ "${CC}" = "clang" ]; then
if [ -z "${CXX}" ]; then
CXX="clang++"
fi
COMPILER_PACKAGES="clang-3.4" # Ubuntu-specific apt package name
else
if [ -z "${CXX}" ]; then
CXX="g++"
fi
COMPILER_PACKAGES="${CC} ${CXX}" # Ubuntu-specific apt package names
fi
export CXX
# Install OS dependencies, assuming stock ubuntu:latest
apt-get update
apt-get install -y \
wget \
git \
${COMPILER_PACKAGES} \
build-essential \
python \
python2.7 \
python2.7-dev \
python-pip
pip install --upgrade --ignore-installed setuptools
pip install wheel
# Move into root of nupic repository
pushd `git rev-parse --show-toplevel`
# Build installable python packages
python setup.py bdist_wheel
# Install nupic wheel and dependencies, including nupic.bindings artifact in
# wheelwhouse/
pip install -f wheelhouse/ dist/nupic-`cat VERSION`*.whl
# Invoke tests
python setup.py test
# Return to original path
popd
| set -o errexit
set -o xtrace
# Prepare environment
if [ -z "${USER}" ]; then
USER="docker"
fi
export USER
# Setup compiler
if [ -z "${CC}" ]; then
CC="gcc"
fi
export CC
if [ "${CC}" = "clang" ]; then
if [ -z "${CXX}" ]; then
CXX="clang++"
fi
COMPILER_PACKAGES="clang-3.4" # Ubuntu-specific apt package name
else
if [ -z "${CXX}" ]; then
CXX="g++"
fi
COMPILER_PACKAGES="${CC} ${CXX}" # Ubuntu-specific apt package names
fi
export CXX
# Install OS dependencies, assuming stock ubuntu:latest
apt-get update
apt-get install -y \
wget \
git \
${COMPILER_PACKAGES} \
build-essential \
python \
python2.7 \
- python2.7-dev
+ python2.7-dev \
? ++
- wget https://bootstrap.pypa.io/get-pip.py -O - | python
+ python-pip
pip install --upgrade --ignore-installed setuptools
pip install wheel
# Move into root of nupic repository
pushd `git rev-parse --show-toplevel`
# Build installable python packages
python setup.py bdist_wheel
# Install nupic wheel and dependencies, including nupic.bindings artifact in
# wheelwhouse/
pip install -f wheelhouse/ dist/nupic-`cat VERSION`*.whl
# Invoke tests
python setup.py test
# Return to original path
popd | 4 | 0.070175 | 2 | 2 |
12b72bcb64adf19537e008542a5b4f30c02df767 | rank_filter.recipe/build.sh | rank_filter.recipe/build.sh | if [ ! -z "${EXT_CC}" ] && [ "${EXT_CC}" != "<UNDEFINED>" ];
then
CC="${EXT_CC}"
fi
if [ ! -z "${EXT_CXX}" ] && [ "${EXT_CXX}" != "<UNDEFINED>" ];
then
CXX="${EXT_CXX}"
fi
# CONFIGURE
SRC="$(pwd)"
mkdir -pv build
cd build
BLD="$(pwd)"
cmake "${SRC}"\
-DCMAKE_MACOSX_RPATH=false \
\
-DCMAKE_PREFIX_PATH="${PREFIX}" \
\
-DCMAKE_SHARED_LINKER_FLAGS="${LDFLAGS} -Wl,-rpath,${PREFIX}/lib -L${PREFIX}/lib" \
\
-DCMAKE_CXX_LINK_FLAGS="${CXXFLAGS} -Wl,-rpath,${PREFIX}/lib -L${PREFIX}/lib" \
-DCMAKE_CXX_FLAGS="${CXXFLAGS} -Wl,-rpath,${PREFIX}/lib -L${PREFIX}/lib" \
\
-DBOOST_ROOT="${PREFIX}" \
-DVIGRA_ROOT="${PREFIX}" \
\
-DPYTHON_EXECUTABLE="${PYTHON}" \
\
# BUILD (in parallel)
make -j${CPU_COUNT}
# "install" to the build prefix (conda will relocate these files afterwards)
make install
| if [ ! -z "${EXT_CC}" ] && [ "${EXT_CC}" != "<UNDEFINED>" ];
then
CC="${EXT_CC}"
fi
if [ ! -z "${EXT_CXX}" ] && [ "${EXT_CXX}" != "<UNDEFINED>" ];
then
CXX="${EXT_CXX}"
fi
# CONFIGURE
SRC="$(pwd)"
mkdir -pv build
cd build
cmake "${SRC}"\
-DCMAKE_MACOSX_RPATH=false \
\
-DCMAKE_PREFIX_PATH="${PREFIX}" \
\
-DCMAKE_SHARED_LINKER_FLAGS="${LDFLAGS} -Wl,-rpath,${PREFIX}/lib -L${PREFIX}/lib" \
\
-DCMAKE_CXX_LINK_FLAGS="${CXXFLAGS} -Wl,-rpath,${PREFIX}/lib -L${PREFIX}/lib" \
-DCMAKE_CXX_FLAGS="${CXXFLAGS} -Wl,-rpath,${PREFIX}/lib -L${PREFIX}/lib" \
\
-DBOOST_ROOT="${PREFIX}" \
-DVIGRA_ROOT="${PREFIX}" \
\
-DPYTHON_EXECUTABLE="${PYTHON}" \
\
# BUILD (in parallel)
make -j${CPU_COUNT}
# "install" to the build prefix (conda will relocate these files afterwards)
make install
| Drop BLD environment variable as it is unused | Drop BLD environment variable as it is unused
Not to mention it is easily determined should we need it.
| Shell | bsd-3-clause | jakirkham/rank_filter,jakirkham/rank_filter,jakirkham/rank_filter,DudLab/rank_filter,nanshe-org/rank_filter,DudLab/rank_filter,DudLab/rank_filter,nanshe-org/rank_filter,nanshe-org/rank_filter | shell | ## Code Before:
if [ ! -z "${EXT_CC}" ] && [ "${EXT_CC}" != "<UNDEFINED>" ];
then
CC="${EXT_CC}"
fi
if [ ! -z "${EXT_CXX}" ] && [ "${EXT_CXX}" != "<UNDEFINED>" ];
then
CXX="${EXT_CXX}"
fi
# CONFIGURE
SRC="$(pwd)"
mkdir -pv build
cd build
BLD="$(pwd)"
cmake "${SRC}"\
-DCMAKE_MACOSX_RPATH=false \
\
-DCMAKE_PREFIX_PATH="${PREFIX}" \
\
-DCMAKE_SHARED_LINKER_FLAGS="${LDFLAGS} -Wl,-rpath,${PREFIX}/lib -L${PREFIX}/lib" \
\
-DCMAKE_CXX_LINK_FLAGS="${CXXFLAGS} -Wl,-rpath,${PREFIX}/lib -L${PREFIX}/lib" \
-DCMAKE_CXX_FLAGS="${CXXFLAGS} -Wl,-rpath,${PREFIX}/lib -L${PREFIX}/lib" \
\
-DBOOST_ROOT="${PREFIX}" \
-DVIGRA_ROOT="${PREFIX}" \
\
-DPYTHON_EXECUTABLE="${PYTHON}" \
\
# BUILD (in parallel)
make -j${CPU_COUNT}
# "install" to the build prefix (conda will relocate these files afterwards)
make install
## Instruction:
Drop BLD environment variable as it is unused
Not to mention it is easily determined should we need it.
## Code After:
if [ ! -z "${EXT_CC}" ] && [ "${EXT_CC}" != "<UNDEFINED>" ];
then
CC="${EXT_CC}"
fi
if [ ! -z "${EXT_CXX}" ] && [ "${EXT_CXX}" != "<UNDEFINED>" ];
then
CXX="${EXT_CXX}"
fi
# CONFIGURE
SRC="$(pwd)"
mkdir -pv build
cd build
cmake "${SRC}"\
-DCMAKE_MACOSX_RPATH=false \
\
-DCMAKE_PREFIX_PATH="${PREFIX}" \
\
-DCMAKE_SHARED_LINKER_FLAGS="${LDFLAGS} -Wl,-rpath,${PREFIX}/lib -L${PREFIX}/lib" \
\
-DCMAKE_CXX_LINK_FLAGS="${CXXFLAGS} -Wl,-rpath,${PREFIX}/lib -L${PREFIX}/lib" \
-DCMAKE_CXX_FLAGS="${CXXFLAGS} -Wl,-rpath,${PREFIX}/lib -L${PREFIX}/lib" \
\
-DBOOST_ROOT="${PREFIX}" \
-DVIGRA_ROOT="${PREFIX}" \
\
-DPYTHON_EXECUTABLE="${PYTHON}" \
\
# BUILD (in parallel)
make -j${CPU_COUNT}
# "install" to the build prefix (conda will relocate these files afterwards)
make install
| if [ ! -z "${EXT_CC}" ] && [ "${EXT_CC}" != "<UNDEFINED>" ];
then
CC="${EXT_CC}"
fi
if [ ! -z "${EXT_CXX}" ] && [ "${EXT_CXX}" != "<UNDEFINED>" ];
then
CXX="${EXT_CXX}"
fi
# CONFIGURE
SRC="$(pwd)"
mkdir -pv build
cd build
- BLD="$(pwd)"
cmake "${SRC}"\
-DCMAKE_MACOSX_RPATH=false \
\
-DCMAKE_PREFIX_PATH="${PREFIX}" \
\
-DCMAKE_SHARED_LINKER_FLAGS="${LDFLAGS} -Wl,-rpath,${PREFIX}/lib -L${PREFIX}/lib" \
\
-DCMAKE_CXX_LINK_FLAGS="${CXXFLAGS} -Wl,-rpath,${PREFIX}/lib -L${PREFIX}/lib" \
-DCMAKE_CXX_FLAGS="${CXXFLAGS} -Wl,-rpath,${PREFIX}/lib -L${PREFIX}/lib" \
\
-DBOOST_ROOT="${PREFIX}" \
-DVIGRA_ROOT="${PREFIX}" \
\
-DPYTHON_EXECUTABLE="${PYTHON}" \
\
# BUILD (in parallel)
make -j${CPU_COUNT}
# "install" to the build prefix (conda will relocate these files afterwards)
make install | 1 | 0.027027 | 0 | 1 |
b0d1a8c930e42f550222b16cd516f7bbe2ba0727 | package.json | package.json | {
"name": "react-redux-feature-toggles",
"version": "1.0.0",
"description": "A package for providing react-redux based feature toggles",
"main": "./lib/index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1",
"compile": "babel --presets es2015 -d lib/ src/",
"prepublish": "npm run compile"
},
"author": "Filip Johansson",
"license": "MIT",
"dependencies": {
"react": "^15.2.1",
"react-dom": "^15.2.1",
"react-redux": "^4.4.5",
"redux": "^3.5.2"
},
"devDependencies": {
"babel-cli": "^6.18.0",
"babel-preset-env": "^1.1.4",
"babel-preset-es2015": "^6.18.0"
}
}
| {
"name": "react-redux-feature-toggles",
"version": "1.0.0",
"description": "A package for providing react-redux based feature toggles",
"main": "./lib/index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1",
"compile": "babel --plugins transform-object-rest-spread --presets es2015,react -d lib/ src/",
"prepublish": "npm run compile"
},
"author": "Filip Johansson",
"license": "MIT",
"dependencies": {
"react": "^15.2.1",
"react-dom": "^15.2.1",
"react-redux": "^4.4.5",
"redux": "^3.5.2"
},
"devDependencies": {
"babel-cli": "^6.18.0",
"babel-plugin-transform-object-rest-spread": "^6.20.2",
"babel-preset-env": "^1.1.4",
"babel-preset-es2015": "^6.18.0",
"babel-preset-react": "^6.16.0",
"webpack": "^1.14.0"
},
"babel": {
"presets": [
"react",
"es2015"
]
}
}
| Add preset and plugin for react and spread operator | Add preset and plugin for react and spread operator
| JSON | mit | filip-ph-johansson/react-redux-feature-toggles | json | ## Code Before:
{
"name": "react-redux-feature-toggles",
"version": "1.0.0",
"description": "A package for providing react-redux based feature toggles",
"main": "./lib/index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1",
"compile": "babel --presets es2015 -d lib/ src/",
"prepublish": "npm run compile"
},
"author": "Filip Johansson",
"license": "MIT",
"dependencies": {
"react": "^15.2.1",
"react-dom": "^15.2.1",
"react-redux": "^4.4.5",
"redux": "^3.5.2"
},
"devDependencies": {
"babel-cli": "^6.18.0",
"babel-preset-env": "^1.1.4",
"babel-preset-es2015": "^6.18.0"
}
}
## Instruction:
Add preset and plugin for react and spread operator
## Code After:
{
"name": "react-redux-feature-toggles",
"version": "1.0.0",
"description": "A package for providing react-redux based feature toggles",
"main": "./lib/index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1",
"compile": "babel --plugins transform-object-rest-spread --presets es2015,react -d lib/ src/",
"prepublish": "npm run compile"
},
"author": "Filip Johansson",
"license": "MIT",
"dependencies": {
"react": "^15.2.1",
"react-dom": "^15.2.1",
"react-redux": "^4.4.5",
"redux": "^3.5.2"
},
"devDependencies": {
"babel-cli": "^6.18.0",
"babel-plugin-transform-object-rest-spread": "^6.20.2",
"babel-preset-env": "^1.1.4",
"babel-preset-es2015": "^6.18.0",
"babel-preset-react": "^6.16.0",
"webpack": "^1.14.0"
},
"babel": {
"presets": [
"react",
"es2015"
]
}
}
| {
"name": "react-redux-feature-toggles",
"version": "1.0.0",
"description": "A package for providing react-redux based feature toggles",
"main": "./lib/index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1",
- "compile": "babel --presets es2015 -d lib/ src/",
+ "compile": "babel --plugins transform-object-rest-spread --presets es2015,react -d lib/ src/",
"prepublish": "npm run compile"
},
"author": "Filip Johansson",
"license": "MIT",
"dependencies": {
"react": "^15.2.1",
"react-dom": "^15.2.1",
"react-redux": "^4.4.5",
"redux": "^3.5.2"
},
"devDependencies": {
"babel-cli": "^6.18.0",
+ "babel-plugin-transform-object-rest-spread": "^6.20.2",
"babel-preset-env": "^1.1.4",
- "babel-preset-es2015": "^6.18.0"
+ "babel-preset-es2015": "^6.18.0",
? +
+ "babel-preset-react": "^6.16.0",
+ "webpack": "^1.14.0"
+ },
+ "babel": {
+ "presets": [
+ "react",
+ "es2015"
+ ]
}
} | 13 | 0.541667 | 11 | 2 |
c02dd6b411b053076e2dbff89207079028ca0e6f | lib/genesis_collector/dmidecode.rb | lib/genesis_collector/dmidecode.rb | module GenesisCollector
module DmiDecode
def get_dmi_data
@dmi_data ||= parse_dmidecode(shellout_with_timeout('dmidecode --type processor --type memory'))
end
def parse_dmidecode(data)
dict={}
handle = 0
current_title = nil
data.lines.each do |line|
case line
when /^End Of Table/, /^\s+$/, /^\# dmidecode/, /^SMBIOS/, /structures occupying/, /^Table at/
next
when /^Handle\s+(.*?),\s+/
handle = $1.to_i(16)
when /(.*)\s+Information\n$/, /(.*)\s+Device\n$/, /(.*)\s+Device Mapped Address\n$/, /(.*)\s+Array Mapped Address\n$/
title = standardize_dmi_key($1)
current_title = title
dict[title] ||= []
dict[title] << {'handle' => handle}
else
raw_data = line.strip.split(':')
if raw_data.is_a?(Array) && raw_data.length == 2
k, v = raw_data
dict[current_title].last[standardize_dmi_key(k)] = v.strip
end
end
end
dict
end
private
def standardize_dmi_key(k)
k.downcase.gsub(/\s+/,'_')
end
end
end
| module GenesisCollector
module DmiDecode
def get_dmi_data
@dmi_data ||= parse_dmidecode(shellout_with_timeout('dmidecode --type processor --type memory'))
end
def parse_dmidecode(data)
dict={}
handle = 0
current_title = nil
data.lines.each do |line|
case line
when /^End Of Table/, /^\s+$/, /^\# dmidecode/, /^SMBIOS/, /structures occupying/, /^Table at/
next
when /^Handle\s+(.*?),\s+/
handle = $1.to_i(16)
when /(.*)\s+Information\n$/, /(.*)\s+Device\n$/, /(.*)\s+Device Mapped Address\n$/, /(.*)\s+Array Mapped Address\n$/, /Physical\s+(.*)\s+Array\n$/
title = standardize_dmi_key($1)
current_title = title
dict[title] ||= []
dict[title] << {'handle' => handle}
else
raw_data = line.strip.split(':')
if raw_data.is_a?(Array) && raw_data.length == 2
k, v = raw_data
dict[current_title].last[standardize_dmi_key(k)] = v.strip
end
end
end
dict
end
private
def standardize_dmi_key(k)
k.downcase.gsub(/\s+/,'_')
end
end
end
| Fix missing case for memory | Fix missing case for memory
| Ruby | mit | Shopify/genesis_collector,Shopify/genesis_collector | ruby | ## Code Before:
module GenesisCollector
module DmiDecode
def get_dmi_data
@dmi_data ||= parse_dmidecode(shellout_with_timeout('dmidecode --type processor --type memory'))
end
def parse_dmidecode(data)
dict={}
handle = 0
current_title = nil
data.lines.each do |line|
case line
when /^End Of Table/, /^\s+$/, /^\# dmidecode/, /^SMBIOS/, /structures occupying/, /^Table at/
next
when /^Handle\s+(.*?),\s+/
handle = $1.to_i(16)
when /(.*)\s+Information\n$/, /(.*)\s+Device\n$/, /(.*)\s+Device Mapped Address\n$/, /(.*)\s+Array Mapped Address\n$/
title = standardize_dmi_key($1)
current_title = title
dict[title] ||= []
dict[title] << {'handle' => handle}
else
raw_data = line.strip.split(':')
if raw_data.is_a?(Array) && raw_data.length == 2
k, v = raw_data
dict[current_title].last[standardize_dmi_key(k)] = v.strip
end
end
end
dict
end
private
def standardize_dmi_key(k)
k.downcase.gsub(/\s+/,'_')
end
end
end
## Instruction:
Fix missing case for memory
## Code After:
module GenesisCollector
module DmiDecode
def get_dmi_data
@dmi_data ||= parse_dmidecode(shellout_with_timeout('dmidecode --type processor --type memory'))
end
def parse_dmidecode(data)
dict={}
handle = 0
current_title = nil
data.lines.each do |line|
case line
when /^End Of Table/, /^\s+$/, /^\# dmidecode/, /^SMBIOS/, /structures occupying/, /^Table at/
next
when /^Handle\s+(.*?),\s+/
handle = $1.to_i(16)
when /(.*)\s+Information\n$/, /(.*)\s+Device\n$/, /(.*)\s+Device Mapped Address\n$/, /(.*)\s+Array Mapped Address\n$/, /Physical\s+(.*)\s+Array\n$/
title = standardize_dmi_key($1)
current_title = title
dict[title] ||= []
dict[title] << {'handle' => handle}
else
raw_data = line.strip.split(':')
if raw_data.is_a?(Array) && raw_data.length == 2
k, v = raw_data
dict[current_title].last[standardize_dmi_key(k)] = v.strip
end
end
end
dict
end
private
def standardize_dmi_key(k)
k.downcase.gsub(/\s+/,'_')
end
end
end
| module GenesisCollector
module DmiDecode
def get_dmi_data
@dmi_data ||= parse_dmidecode(shellout_with_timeout('dmidecode --type processor --type memory'))
end
def parse_dmidecode(data)
dict={}
handle = 0
current_title = nil
data.lines.each do |line|
case line
when /^End Of Table/, /^\s+$/, /^\# dmidecode/, /^SMBIOS/, /structures occupying/, /^Table at/
next
when /^Handle\s+(.*?),\s+/
handle = $1.to_i(16)
- when /(.*)\s+Information\n$/, /(.*)\s+Device\n$/, /(.*)\s+Device Mapped Address\n$/, /(.*)\s+Array Mapped Address\n$/
+ when /(.*)\s+Information\n$/, /(.*)\s+Device\n$/, /(.*)\s+Device Mapped Address\n$/, /(.*)\s+Array Mapped Address\n$/, /Physical\s+(.*)\s+Array\n$/
? ++++++++++++++++++++++++++++++
title = standardize_dmi_key($1)
current_title = title
dict[title] ||= []
dict[title] << {'handle' => handle}
else
raw_data = line.strip.split(':')
if raw_data.is_a?(Array) && raw_data.length == 2
k, v = raw_data
dict[current_title].last[standardize_dmi_key(k)] = v.strip
end
end
end
dict
end
private
def standardize_dmi_key(k)
k.downcase.gsub(/\s+/,'_')
end
end
end | 2 | 0.04878 | 1 | 1 |
a8bd79cf7bda8bd6e46073d1f90bfd51ba2efbf8 | .travis.yml | .travis.yml | language: cpp
matrix:
include:
- os: osx
osx_image: xcode10
env: MATRIX_EVAL="brew install qt" BUILD_TYPE=Release
- os: linux
addons: { apt: {
packages: [ g++-6, libboost1.55-all-dev, qt5-default ],
sources: [ ubuntu-toolchain-r-test, boost-latest ] } }
env: MATRIX_EVAL="CC=gcc-6 && CXX=g++-6" BUILD_TYPE=Release
before_install:
- eval "${MATRIX_EVAL}"
script:
- ci/travis-script.sh
notifications:
email: false
| language: cpp
matrix:
include:
- os: osx
osx_image: xcode10
env: MATRIX_EVAL="brew install qt" BUILD_TYPE=Release
- os: linux
addons: { apt: {
packages: [ g++-6, libboost1.55-all-dev, qt5-default ],
sources: [ ubuntu-toolchain-r-test, boost-latest ] } }
env: MATRIX_EVAL="CC=gcc-6 && CXX=g++-6" BUILD_TYPE=Release
- os: linux
addons: { apt: {
packages: [ clang-6.0, libstdc++-6-dev, libboost1.55-all-dev, qt5-default ],
sources: [ llvm-toolchain-trusty-6.0, ubuntu-toolchain-r-test, boost-latest ] } }
env: MATRIX_EVAL="CC=clang-6.0 && CXX=clang++-6.0" BUILD_TYPE=Release
before_install:
- eval "${MATRIX_EVAL}"
script:
- ci/travis-script.sh
notifications:
email: false
| Add clang-6 build on linux | Add clang-6 build on linux
| YAML | mit | benvenutti/core8,benvenutti/core8 | yaml | ## Code Before:
language: cpp
matrix:
include:
- os: osx
osx_image: xcode10
env: MATRIX_EVAL="brew install qt" BUILD_TYPE=Release
- os: linux
addons: { apt: {
packages: [ g++-6, libboost1.55-all-dev, qt5-default ],
sources: [ ubuntu-toolchain-r-test, boost-latest ] } }
env: MATRIX_EVAL="CC=gcc-6 && CXX=g++-6" BUILD_TYPE=Release
before_install:
- eval "${MATRIX_EVAL}"
script:
- ci/travis-script.sh
notifications:
email: false
## Instruction:
Add clang-6 build on linux
## Code After:
language: cpp
matrix:
include:
- os: osx
osx_image: xcode10
env: MATRIX_EVAL="brew install qt" BUILD_TYPE=Release
- os: linux
addons: { apt: {
packages: [ g++-6, libboost1.55-all-dev, qt5-default ],
sources: [ ubuntu-toolchain-r-test, boost-latest ] } }
env: MATRIX_EVAL="CC=gcc-6 && CXX=g++-6" BUILD_TYPE=Release
- os: linux
addons: { apt: {
packages: [ clang-6.0, libstdc++-6-dev, libboost1.55-all-dev, qt5-default ],
sources: [ llvm-toolchain-trusty-6.0, ubuntu-toolchain-r-test, boost-latest ] } }
env: MATRIX_EVAL="CC=clang-6.0 && CXX=clang++-6.0" BUILD_TYPE=Release
before_install:
- eval "${MATRIX_EVAL}"
script:
- ci/travis-script.sh
notifications:
email: false
| language: cpp
matrix:
include:
- os: osx
osx_image: xcode10
env: MATRIX_EVAL="brew install qt" BUILD_TYPE=Release
- os: linux
addons: { apt: {
packages: [ g++-6, libboost1.55-all-dev, qt5-default ],
sources: [ ubuntu-toolchain-r-test, boost-latest ] } }
env: MATRIX_EVAL="CC=gcc-6 && CXX=g++-6" BUILD_TYPE=Release
+ - os: linux
+ addons: { apt: {
+ packages: [ clang-6.0, libstdc++-6-dev, libboost1.55-all-dev, qt5-default ],
+ sources: [ llvm-toolchain-trusty-6.0, ubuntu-toolchain-r-test, boost-latest ] } }
+ env: MATRIX_EVAL="CC=clang-6.0 && CXX=clang++-6.0" BUILD_TYPE=Release
+
before_install:
- eval "${MATRIX_EVAL}"
script:
- ci/travis-script.sh
notifications:
email: false | 6 | 0.272727 | 6 | 0 |
d8e04eea9ad80f6433ac894a79643dd6bc17f49a | ui/person/backorders.html | ui/person/backorders.html | {% extends 'layout/page.html' %}
{% import 'macros.twig' as scat %}
{% block title %}
Backorders for {{ person.friendly_name }}
{% endblock %}
{% block content %}
{% if not backorders|length %}
<p class="lead">No backorders for this vendor.</p>
{% endif %}
{% for backorder in backorders %}
<h2>
{{ backorder.txn.friendly_type }}
{{ backorder.txn.formatted_number }}
</h2>
<table class="table table-striped table-hover">
<thead>
<tr>
<th class="col-sm-2">SKU</th>
<th class="col-sm-2">Code</th>
<th class="col-sm-7">Name</th>
<th class="col-sm-1">Quantity</th>
</tr>
</thead>
<tbody>
{% for item in backorder.items %}
<tr>
<td>{{ item.vendor_sku(person.id) }}</td>
<td>{{ item.code }}</td>
<td>{{ item.name }}</td>
<td>{{ item.ordered - item.allocated }}</td>
</tr>
{% endfor %}
</tbody>
</table>
{% endfor %}
{% endblock %}
| {% extends 'layout/page.html' %}
{% import 'macros.twig' as scat %}
{% block title %}
Backorders for {{ person.friendly_name }}
{% endblock %}
{% block content %}
{% if not backorders|length %}
<p class="lead">No backorders for this vendor.</p>
{% endif %}
{% for backorder in backorders %}
<table class="table table-striped table-hover">
<caption>
<h3 style="color: #000">
{{ backorder.txn.friendly_type }}
{{ backorder.txn.formatted_number }}
</h3>
</caption>
<thead>
<tr>
<th class="col-sm-2">SKU</th>
<th class="col-sm-2">Code</th>
<th class="col-sm-7">Name</th>
<th class="col-sm-1">Quantity</th>
</tr>
</thead>
<tbody>
{% for item in backorder.items %}
<tr>
<td>{{ item.vendor_sku(person.id) }}</td>
<td>{{ item.code }}</td>
<td>{{ item.name }}</td>
<td>{{ item.ordered - item.allocated }}</td>
</tr>
{% endfor %}
</tbody>
</table>
{% endfor %}
{% endblock %}
| Make table title use <caption> | Make table title use <caption>
| HTML | mit | jimwins/scat,jimwins/scat,jimwins/scat,jimwins/scat | html | ## Code Before:
{% extends 'layout/page.html' %}
{% import 'macros.twig' as scat %}
{% block title %}
Backorders for {{ person.friendly_name }}
{% endblock %}
{% block content %}
{% if not backorders|length %}
<p class="lead">No backorders for this vendor.</p>
{% endif %}
{% for backorder in backorders %}
<h2>
{{ backorder.txn.friendly_type }}
{{ backorder.txn.formatted_number }}
</h2>
<table class="table table-striped table-hover">
<thead>
<tr>
<th class="col-sm-2">SKU</th>
<th class="col-sm-2">Code</th>
<th class="col-sm-7">Name</th>
<th class="col-sm-1">Quantity</th>
</tr>
</thead>
<tbody>
{% for item in backorder.items %}
<tr>
<td>{{ item.vendor_sku(person.id) }}</td>
<td>{{ item.code }}</td>
<td>{{ item.name }}</td>
<td>{{ item.ordered - item.allocated }}</td>
</tr>
{% endfor %}
</tbody>
</table>
{% endfor %}
{% endblock %}
## Instruction:
Make table title use <caption>
## Code After:
{% extends 'layout/page.html' %}
{% import 'macros.twig' as scat %}
{% block title %}
Backorders for {{ person.friendly_name }}
{% endblock %}
{% block content %}
{% if not backorders|length %}
<p class="lead">No backorders for this vendor.</p>
{% endif %}
{% for backorder in backorders %}
<table class="table table-striped table-hover">
<caption>
<h3 style="color: #000">
{{ backorder.txn.friendly_type }}
{{ backorder.txn.formatted_number }}
</h3>
</caption>
<thead>
<tr>
<th class="col-sm-2">SKU</th>
<th class="col-sm-2">Code</th>
<th class="col-sm-7">Name</th>
<th class="col-sm-1">Quantity</th>
</tr>
</thead>
<tbody>
{% for item in backorder.items %}
<tr>
<td>{{ item.vendor_sku(person.id) }}</td>
<td>{{ item.code }}</td>
<td>{{ item.name }}</td>
<td>{{ item.ordered - item.allocated }}</td>
</tr>
{% endfor %}
</tbody>
</table>
{% endfor %}
{% endblock %}
| {% extends 'layout/page.html' %}
{% import 'macros.twig' as scat %}
{% block title %}
Backorders for {{ person.friendly_name }}
{% endblock %}
{% block content %}
{% if not backorders|length %}
<p class="lead">No backorders for this vendor.</p>
{% endif %}
{% for backorder in backorders %}
- <h2>
- {{ backorder.txn.friendly_type }}
- {{ backorder.txn.formatted_number }}
- </h2>
-
<table class="table table-striped table-hover">
+ <caption>
+ <h3 style="color: #000">
+ {{ backorder.txn.friendly_type }}
+ {{ backorder.txn.formatted_number }}
+ </h3>
+ </caption>
<thead>
<tr>
<th class="col-sm-2">SKU</th>
<th class="col-sm-2">Code</th>
<th class="col-sm-7">Name</th>
<th class="col-sm-1">Quantity</th>
</tr>
</thead>
<tbody>
{% for item in backorder.items %}
<tr>
<td>{{ item.vendor_sku(person.id) }}</td>
<td>{{ item.code }}</td>
<td>{{ item.name }}</td>
<td>{{ item.ordered - item.allocated }}</td>
</tr>
{% endfor %}
</tbody>
</table>
{% endfor %}
{% endblock %} | 11 | 0.275 | 6 | 5 |
6fb1a38d913504694df894c63a3530e0fec4957b | server/views/partials/kthHeader.handlebars | server/views/partials/kthHeader.handlebars | <!--indexOff: all-->
<header>
<div class="container-fluid">
<div class="container">
<div class="header-container__top">
{{{blocks.image}}}
{{{blocks.title}}}
{{{blocks.secondaryMenu}}}
</div>
<div class="header-container__bottom">
{{{blocks.megaMenu}}}
{{{blocks.search}}}
</div>
</div>
</div>
<div id="gradientBorder"></div>
</header>
<!--indexOn: all-->
| <!--indexOff: all-->
<header>
<div class="container-fluid">
<div class="container">
<div class="header-container__top">
{{{blocks.image}}} {{{blocks.title}}}
<div id="mobileMenuWrapper">
<button id="nav-icon" class="navbar-toggler nav-icon" type="button" title="Öppna/stäng mobilmenyn">
<span></span>
<span></span>
<span></span>
<span></span>
</button>
<nav id="mobileMenu" class="block navbar navbar-expand-lg navbar-light">
<div id="mobileMenuContent" class="navbar-collapse collapse">
<ul id="mobileMenuList" class="menu navbar-nav mr-auto"></ul>
</div>
</nav>
</div>
{{{blocks.secondaryMenu}}}
</div>
<div class="header-container__bottom">{{{blocks.megaMenu}}} {{{blocks.search}}}</div>
</div>
</div>
<div id="gradientBorder"></div>
<div class="container articleNavigation">
<div class="row justify-content-between">
{{breadcrumbs breadcrumbsPath lang}}
</div>
</div>
</header>
<!--indexOn: all-->
| Add mobile menu wrapper and breadcrumbs in KTH header | Add mobile menu wrapper and breadcrumbs in KTH header
| Handlebars | mit | KTH/node-web,KTH/node-web | handlebars | ## Code Before:
<!--indexOff: all-->
<header>
<div class="container-fluid">
<div class="container">
<div class="header-container__top">
{{{blocks.image}}}
{{{blocks.title}}}
{{{blocks.secondaryMenu}}}
</div>
<div class="header-container__bottom">
{{{blocks.megaMenu}}}
{{{blocks.search}}}
</div>
</div>
</div>
<div id="gradientBorder"></div>
</header>
<!--indexOn: all-->
## Instruction:
Add mobile menu wrapper and breadcrumbs in KTH header
## Code After:
<!--indexOff: all-->
<header>
<div class="container-fluid">
<div class="container">
<div class="header-container__top">
{{{blocks.image}}} {{{blocks.title}}}
<div id="mobileMenuWrapper">
<button id="nav-icon" class="navbar-toggler nav-icon" type="button" title="Öppna/stäng mobilmenyn">
<span></span>
<span></span>
<span></span>
<span></span>
</button>
<nav id="mobileMenu" class="block navbar navbar-expand-lg navbar-light">
<div id="mobileMenuContent" class="navbar-collapse collapse">
<ul id="mobileMenuList" class="menu navbar-nav mr-auto"></ul>
</div>
</nav>
</div>
{{{blocks.secondaryMenu}}}
</div>
<div class="header-container__bottom">{{{blocks.megaMenu}}} {{{blocks.search}}}</div>
</div>
</div>
<div id="gradientBorder"></div>
<div class="container articleNavigation">
<div class="row justify-content-between">
{{breadcrumbs breadcrumbsPath lang}}
</div>
</div>
</header>
<!--indexOn: all-->
| <!--indexOff: all-->
<header>
<div class="container-fluid">
<div class="container">
<div class="header-container__top">
- {{{blocks.image}}}
- {{{blocks.title}}}
+ {{{blocks.image}}} {{{blocks.title}}}
+ <div id="mobileMenuWrapper">
+ <button id="nav-icon" class="navbar-toggler nav-icon" type="button" title="Öppna/stäng mobilmenyn">
+ <span></span>
+ <span></span>
+ <span></span>
+ <span></span>
+ </button>
+ <nav id="mobileMenu" class="block navbar navbar-expand-lg navbar-light">
+ <div id="mobileMenuContent" class="navbar-collapse collapse">
+ <ul id="mobileMenuList" class="menu navbar-nav mr-auto"></ul>
+ </div>
+ </nav>
+ </div>
{{{blocks.secondaryMenu}}}
</div>
+ <div class="header-container__bottom">{{{blocks.megaMenu}}} {{{blocks.search}}}</div>
- <div class="header-container__bottom">
- {{{blocks.megaMenu}}}
- {{{blocks.search}}}
- </div>
</div>
</div>
<div id="gradientBorder"></div>
+
+ <div class="container articleNavigation">
+ <div class="row justify-content-between">
+ {{breadcrumbs breadcrumbsPath lang}}
+ </div>
+ </div>
</header>
<!--indexOn: all--> | 27 | 1.5 | 21 | 6 |
c86051ba22e21caf8fc20c2474d7714935658862 | site-lisp/db-utils-test.el | site-lisp/db-utils-test.el | ;;; db-utils-test.el -- Tests for db-utils -*- lexical-binding: t -*-
;;; Commentary:
;;; Code:
(require 'ert)
(require 'db-utils)
(require 'cl-lib)
(ert-deftest db-utils-db/ntp-to-time ()
"Test NTP conversion to readable time with `db/ntp-to-time'."
(should (equal (db/ntp-to-time #xdcd2ac0c #x05c6dbac)
"2017-05-26T13:28:44.022565583Z"))
(should (equal (db/ntp-to-time #xbd5927ee #xbc616000)
"2000-08-31T18:52:30.735860824Z")))
;;; db-utils-test.el ends here
| ;;; db-utils-test.el -- Tests for db-utils -*- lexical-binding: t -*-
;;; Commentary:
;;; Code:
(require 'ert)
(require 'db-utils)
(require 'cl-lib)
(ert-deftest db-utils-db/ntp-to-time ()
"Test NTP conversion to readable time with `db/ntp-to-time'."
(should (equal (db/ntp-to-time #xdcd2ac0c #x05c6dbac)
"2017-05-26T13:28:44.022565583Z"))
(should (equal (db/ntp-to-time #xbd5927ee #xbc616000)
"2000-08-31T18:52:30.735860824Z"))
(should (equal (db/ntp-to-time #x00000000 #x0000000)
"1900-01-01T00:00:00.000000000Z")))
;;; db-utils-test.el ends here
| Add obvious test for NTP conversion | Add obvious test for NTP conversion
This test could not have been added before, because conversion was not
guaranteed to work for dates before the unix epoch. It now is, and we finally
can check the start of the NTP epoch! :)
| Emacs Lisp | mit | exot/.emacs.d | emacs-lisp | ## Code Before:
;;; db-utils-test.el -- Tests for db-utils -*- lexical-binding: t -*-
;;; Commentary:
;;; Code:
(require 'ert)
(require 'db-utils)
(require 'cl-lib)
(ert-deftest db-utils-db/ntp-to-time ()
"Test NTP conversion to readable time with `db/ntp-to-time'."
(should (equal (db/ntp-to-time #xdcd2ac0c #x05c6dbac)
"2017-05-26T13:28:44.022565583Z"))
(should (equal (db/ntp-to-time #xbd5927ee #xbc616000)
"2000-08-31T18:52:30.735860824Z")))
;;; db-utils-test.el ends here
## Instruction:
Add obvious test for NTP conversion
This test could not have been added before, because conversion was not
guaranteed to work for dates before the unix epoch. It now is, and we finally
can check the start of the NTP epoch! :)
## Code After:
;;; db-utils-test.el -- Tests for db-utils -*- lexical-binding: t -*-
;;; Commentary:
;;; Code:
(require 'ert)
(require 'db-utils)
(require 'cl-lib)
(ert-deftest db-utils-db/ntp-to-time ()
"Test NTP conversion to readable time with `db/ntp-to-time'."
(should (equal (db/ntp-to-time #xdcd2ac0c #x05c6dbac)
"2017-05-26T13:28:44.022565583Z"))
(should (equal (db/ntp-to-time #xbd5927ee #xbc616000)
"2000-08-31T18:52:30.735860824Z"))
(should (equal (db/ntp-to-time #x00000000 #x0000000)
"1900-01-01T00:00:00.000000000Z")))
;;; db-utils-test.el ends here
| ;;; db-utils-test.el -- Tests for db-utils -*- lexical-binding: t -*-
;;; Commentary:
;;; Code:
(require 'ert)
(require 'db-utils)
(require 'cl-lib)
(ert-deftest db-utils-db/ntp-to-time ()
"Test NTP conversion to readable time with `db/ntp-to-time'."
(should (equal (db/ntp-to-time #xdcd2ac0c #x05c6dbac)
"2017-05-26T13:28:44.022565583Z"))
(should (equal (db/ntp-to-time #xbd5927ee #xbc616000)
- "2000-08-31T18:52:30.735860824Z")))
? -
+ "2000-08-31T18:52:30.735860824Z"))
+ (should (equal (db/ntp-to-time #x00000000 #x0000000)
+ "1900-01-01T00:00:00.000000000Z")))
;;; db-utils-test.el ends here | 4 | 0.222222 | 3 | 1 |
eae20462a9863b19f9520904795f7de217c5bf9d | src/generate_html_boilerplate.jsx | src/generate_html_boilerplate.jsx | // VARIABLES //
const IFRAME_RESIZER_CONTENT_SCRIPT_PATH = 'https://cdnjs.cloudflare.com/ajax/libs/iframe-resizer/4.2.1/iframeResizer.contentWindow.min.js';
// MAIN //
/**
* Generates a boilerplate HTML page which fetches a specified resource and overwrites its contents.
*
* @private
* @param {string} resourcePath - URL of resource to fetch
* @returns {string} boilerplate HTML page
*/
const generateHTMLBoilerplate = ( resourcePath ) => `<html>
<body>
<script type="text/javascript">
document.addEventListener( 'DOMContentLoaded', () => {
var html = fetch( '${resourcePath}' );
html
.then( response => response.text() )
.then( text => {
var script = document.createElement( 'script' );
script.src = '${IFRAME_RESIZER_CONTENT_SCRIPT_PATH}';
document.open();
document.write( text );
document.head.appendChild( script );
document.close();
});
});
</script>
</body>
</html>`;
// EXPORTS //
export default generateHTMLBoilerplate;
| /**
* @license Apache-2.0
*
* Copyright (c) 2019 The Stdlib Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// VARIABLES //
const IFRAME_RESIZER_CONTENT_SCRIPT_PATH = 'https://cdnjs.cloudflare.com/ajax/libs/iframe-resizer/4.2.1/iframeResizer.contentWindow.min.js';
// MAIN //
/**
* Generates a boilerplate HTML page which fetches a specified resource and overwrites its contents.
*
* @private
* @param {string} resourcePath - URL of resource to fetch
* @returns {string} boilerplate HTML page
*/
function generateHTMLBoilerplate( resourcePath ) {
return `<html>
<body>
<script type="text/javascript">
document.addEventListener( 'DOMContentLoaded', () => {
var html = fetch( '${resourcePath}' );
html
.then( response => response.text() )
.then( text => {
var script = document.createElement( 'script' );
script.src = '${IFRAME_RESIZER_CONTENT_SCRIPT_PATH}';
document.open();
document.write( text );
document.head.appendChild( script );
document.close();
});
});
</script>
</body>
</html>`;
}
// EXPORTS //
export default generateHTMLBoilerplate;
| Add missing license header and convert function type | Add missing license header and convert function type
| JSX | apache-2.0 | stdlib-js/www,stdlib-js/www,stdlib-js/www | jsx | ## Code Before:
// VARIABLES //
const IFRAME_RESIZER_CONTENT_SCRIPT_PATH = 'https://cdnjs.cloudflare.com/ajax/libs/iframe-resizer/4.2.1/iframeResizer.contentWindow.min.js';
// MAIN //
/**
* Generates a boilerplate HTML page which fetches a specified resource and overwrites its contents.
*
* @private
* @param {string} resourcePath - URL of resource to fetch
* @returns {string} boilerplate HTML page
*/
const generateHTMLBoilerplate = ( resourcePath ) => `<html>
<body>
<script type="text/javascript">
document.addEventListener( 'DOMContentLoaded', () => {
var html = fetch( '${resourcePath}' );
html
.then( response => response.text() )
.then( text => {
var script = document.createElement( 'script' );
script.src = '${IFRAME_RESIZER_CONTENT_SCRIPT_PATH}';
document.open();
document.write( text );
document.head.appendChild( script );
document.close();
});
});
</script>
</body>
</html>`;
// EXPORTS //
export default generateHTMLBoilerplate;
## Instruction:
Add missing license header and convert function type
## Code After:
/**
* @license Apache-2.0
*
* Copyright (c) 2019 The Stdlib Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// VARIABLES //
const IFRAME_RESIZER_CONTENT_SCRIPT_PATH = 'https://cdnjs.cloudflare.com/ajax/libs/iframe-resizer/4.2.1/iframeResizer.contentWindow.min.js';
// MAIN //
/**
* Generates a boilerplate HTML page which fetches a specified resource and overwrites its contents.
*
* @private
* @param {string} resourcePath - URL of resource to fetch
* @returns {string} boilerplate HTML page
*/
function generateHTMLBoilerplate( resourcePath ) {
return `<html>
<body>
<script type="text/javascript">
document.addEventListener( 'DOMContentLoaded', () => {
var html = fetch( '${resourcePath}' );
html
.then( response => response.text() )
.then( text => {
var script = document.createElement( 'script' );
script.src = '${IFRAME_RESIZER_CONTENT_SCRIPT_PATH}';
document.open();
document.write( text );
document.head.appendChild( script );
document.close();
});
});
</script>
</body>
</html>`;
}
// EXPORTS //
export default generateHTMLBoilerplate;
| + /**
+ * @license Apache-2.0
+ *
+ * Copyright (c) 2019 The Stdlib Authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
// VARIABLES //
const IFRAME_RESIZER_CONTENT_SCRIPT_PATH = 'https://cdnjs.cloudflare.com/ajax/libs/iframe-resizer/4.2.1/iframeResizer.contentWindow.min.js';
// MAIN //
/**
* Generates a boilerplate HTML page which fetches a specified resource and overwrites its contents.
*
* @private
* @param {string} resourcePath - URL of resource to fetch
* @returns {string} boilerplate HTML page
*/
- const generateHTMLBoilerplate = ( resourcePath ) => `<html>
? -- --- ^^^^^^^^^^
+ function generateHTMLBoilerplate( resourcePath ) {
? +++ ++ ^
+ return `<html>
<body>
<script type="text/javascript">
document.addEventListener( 'DOMContentLoaded', () => {
var html = fetch( '${resourcePath}' );
html
.then( response => response.text() )
.then( text => {
var script = document.createElement( 'script' );
script.src = '${IFRAME_RESIZER_CONTENT_SCRIPT_PATH}';
document.open();
document.write( text );
document.head.appendChild( script );
document.close();
});
});
</script>
</body>
</html>`;
+ }
// EXPORTS //
export default generateHTMLBoilerplate; | 22 | 0.578947 | 21 | 1 |
f3e19f8684e4d67b6635a02059d13be57cc4793a | build-schemas.sh | build-schemas.sh | zip="$1"
## Utility functions
die() {
echo
echo "*** $@" 1>&2;
exit 1;
}
## Checks on the variables
if test -z "$zip"; then
die "The ZIP file is a mandatory option."
fi
if test \! -f "$zip"; then
die "ZIP file '$zip' is not a regular file."
fi
if test \! -d garbage; then
die "The 'garbage' dir does not exist. Are you in the root dir?"
fi
## Unzip it
rm -rf garbage/*
unzip "$zip" -d garbage/ >/dev/null
src=`echo garbage/docbook-*/`
num=`echo "$src" | sed -e 's|garbage/docbook-\(.*\)/|\1|'`
dest=garbage/build
mkdir "$dest"
mkdir "$dest/content"
cp rsrc/docbook-schemas-pkg.xml "$dest/expath-pkg.xml"
cp rsrc/docbook-schemas-cxan.xml "$dest/cxan.xml"
cp "$src/docbook.nvdl" "$dest/content/"
cp "$src/sch/docbook.sch" "$dest/content/"
cp -R "$src/rng" "$dest/content/"
cp -R "$src/xsd" "$dest/content/"
( cd "$dest"; zip -r "docbook-schemas-${num}.xar" . )
mv "$dest"/*.xar dist/
| zip="$1"
## Utility functions
die() {
echo
echo "*** $@" 1>&2;
exit 1;
}
## Checks on the variables
if test -z "$zip"; then
die "The ZIP file is a mandatory option."
fi
if test \! -f "$zip"; then
die "ZIP file '$zip' is not a regular file."
fi
if test \! -d garbage; then
die "The 'garbage' dir does not exist. Are you in the root dir?"
fi
## Unzip it
rm -rf garbage/*
unzip "$zip" -d garbage/ >/dev/null
src=`echo garbage/docbook-*/`
num=`echo "$src" | sed -e 's|garbage/docbook-\(.*\)/|\1|'`
dest=garbage/build
mkdir "$dest"
mkdir "$dest/content"
sed "s/{@VERSION}/$num/g" rsrc/docbook-schemas-pkg.xml > "$dest/expath-pkg.xml"
sed "s/{@VERSION}/$num/g" rsrc/docbook-schemas-cxan.xml > "$dest/cxan.xml"
cp "$src/docbook.nvdl" "$dest/content/"
cp "$src/dtd/docbook.dtd" "$dest/content/"
cp "$src/sch/docbook.sch" "$dest/content/"
cp -R "$src/rng" "$dest/content/"
cp -R "$src/xsd" "$dest/content/"
( cd "$dest"; zip -r "docbook-schemas-${num}.xar" . )
mv "$dest"/*.xar dist/
| Replace the version number in the package descriptors. | Replace the version number in the package descriptors.
| Shell | apache-2.0 | fgeorges/docbook-pkg | shell | ## Code Before:
zip="$1"
## Utility functions
die() {
echo
echo "*** $@" 1>&2;
exit 1;
}
## Checks on the variables
if test -z "$zip"; then
die "The ZIP file is a mandatory option."
fi
if test \! -f "$zip"; then
die "ZIP file '$zip' is not a regular file."
fi
if test \! -d garbage; then
die "The 'garbage' dir does not exist. Are you in the root dir?"
fi
## Unzip it
rm -rf garbage/*
unzip "$zip" -d garbage/ >/dev/null
src=`echo garbage/docbook-*/`
num=`echo "$src" | sed -e 's|garbage/docbook-\(.*\)/|\1|'`
dest=garbage/build
mkdir "$dest"
mkdir "$dest/content"
cp rsrc/docbook-schemas-pkg.xml "$dest/expath-pkg.xml"
cp rsrc/docbook-schemas-cxan.xml "$dest/cxan.xml"
cp "$src/docbook.nvdl" "$dest/content/"
cp "$src/sch/docbook.sch" "$dest/content/"
cp -R "$src/rng" "$dest/content/"
cp -R "$src/xsd" "$dest/content/"
( cd "$dest"; zip -r "docbook-schemas-${num}.xar" . )
mv "$dest"/*.xar dist/
## Instruction:
Replace the version number in the package descriptors.
## Code After:
zip="$1"
## Utility functions
die() {
echo
echo "*** $@" 1>&2;
exit 1;
}
## Checks on the variables
if test -z "$zip"; then
die "The ZIP file is a mandatory option."
fi
if test \! -f "$zip"; then
die "ZIP file '$zip' is not a regular file."
fi
if test \! -d garbage; then
die "The 'garbage' dir does not exist. Are you in the root dir?"
fi
## Unzip it
rm -rf garbage/*
unzip "$zip" -d garbage/ >/dev/null
src=`echo garbage/docbook-*/`
num=`echo "$src" | sed -e 's|garbage/docbook-\(.*\)/|\1|'`
dest=garbage/build
mkdir "$dest"
mkdir "$dest/content"
sed "s/{@VERSION}/$num/g" rsrc/docbook-schemas-pkg.xml > "$dest/expath-pkg.xml"
sed "s/{@VERSION}/$num/g" rsrc/docbook-schemas-cxan.xml > "$dest/cxan.xml"
cp "$src/docbook.nvdl" "$dest/content/"
cp "$src/dtd/docbook.dtd" "$dest/content/"
cp "$src/sch/docbook.sch" "$dest/content/"
cp -R "$src/rng" "$dest/content/"
cp -R "$src/xsd" "$dest/content/"
( cd "$dest"; zip -r "docbook-schemas-${num}.xar" . )
mv "$dest"/*.xar dist/
| zip="$1"
## Utility functions
die() {
echo
echo "*** $@" 1>&2;
exit 1;
}
## Checks on the variables
if test -z "$zip"; then
die "The ZIP file is a mandatory option."
fi
if test \! -f "$zip"; then
die "ZIP file '$zip' is not a regular file."
fi
if test \! -d garbage; then
die "The 'garbage' dir does not exist. Are you in the root dir?"
fi
## Unzip it
rm -rf garbage/*
unzip "$zip" -d garbage/ >/dev/null
src=`echo garbage/docbook-*/`
num=`echo "$src" | sed -e 's|garbage/docbook-\(.*\)/|\1|'`
dest=garbage/build
mkdir "$dest"
mkdir "$dest/content"
- cp rsrc/docbook-schemas-pkg.xml "$dest/expath-pkg.xml"
? ^^ ^^
+ sed "s/{@VERSION}/$num/g" rsrc/docbook-schemas-pkg.xml > "$dest/expath-pkg.xml"
? ^^^ ^^^^^^^^^^^^^^^^^^^^^ ++
- cp rsrc/docbook-schemas-cxan.xml "$dest/cxan.xml"
? ^^ ^^
+ sed "s/{@VERSION}/$num/g" rsrc/docbook-schemas-cxan.xml > "$dest/cxan.xml"
? ^^^ ^^^^^^^^^^^^^^^^^^^^^ ++
+
- cp "$src/docbook.nvdl" "$dest/content/"
? -------
+ cp "$src/docbook.nvdl" "$dest/content/"
+ cp "$src/dtd/docbook.dtd" "$dest/content/"
- cp "$src/sch/docbook.sch" "$dest/content/"
? -------
+ cp "$src/sch/docbook.sch" "$dest/content/"
- cp -R "$src/rng" "$dest/content/"
? -------
+ cp -R "$src/rng" "$dest/content/"
- cp -R "$src/xsd" "$dest/content/"
? -------
+ cp -R "$src/xsd" "$dest/content/"
( cd "$dest"; zip -r "docbook-schemas-${num}.xar" . )
mv "$dest"/*.xar dist/ | 14 | 0.304348 | 8 | 6 |
8822b0bdaa4ec13cfd0f78c07d79f4884e4c60aa | README.md | README.md | [](https://travis-ci.org/youngar/Base9)
# Building Base9
## Grab the source
clone this repository using git:
```sh
git clone https://github.com/youngar/Base9
cd Base9
```
## Building
### Set up the build directory
Create a build directory and configure the build. Base9 and OMR use Cmake.
We suggest using the ninja generator.
```sh
# Inside the Base9 directory
mkdir build && cd build
cmake -G Ninja ..
```
### Build it!
```sh
# Inside the build directory
ninja -j2
```
## Testing
To run the base9 tests, run:
```sh
# Inside the build directory
ctest -V
```
## Benchmarking
To run the benchmark, use:
```
ninja bench
```
| [](https://travis-ci.org/youngar/Base9)
# Building Base9
## Grab the source
clone this repository using git:
```sh
git clone https://github.com/youngar/Base9
cd Base9
# Download sub-modules: googletest and omr
git submodule update --init
```
## Building
### Set up the build directory
Create a build directory and configure the build. Base9 and OMR use Cmake.
We suggest using the ninja generator.
```sh
# Inside the Base9 directory
mkdir build && cd build
cmake -G Ninja ..
```
### Build it!
```sh
# Inside the build directory
ninja -j2
```
## Testing
To run the base9 tests, run:
```sh
# Inside the build directory
ctest -V
```
## Benchmarking
To run the benchmark, use:
```
ninja bench
```
| Add command to download sub-modules | Add command to download sub-modules
Signed-off-by: Xiaoli Liang <62a2846df84d0f90b761bf427dd67be80694b69d@gmail.com>
| Markdown | apache-2.0 | jduimovich/Base9,youngar/Base9,youngar/Base9,youngar/Base9,jduimovich/Base9 | markdown | ## Code Before:
[](https://travis-ci.org/youngar/Base9)
# Building Base9
## Grab the source
clone this repository using git:
```sh
git clone https://github.com/youngar/Base9
cd Base9
```
## Building
### Set up the build directory
Create a build directory and configure the build. Base9 and OMR use Cmake.
We suggest using the ninja generator.
```sh
# Inside the Base9 directory
mkdir build && cd build
cmake -G Ninja ..
```
### Build it!
```sh
# Inside the build directory
ninja -j2
```
## Testing
To run the base9 tests, run:
```sh
# Inside the build directory
ctest -V
```
## Benchmarking
To run the benchmark, use:
```
ninja bench
```
## Instruction:
Add command to download sub-modules
Signed-off-by: Xiaoli Liang <62a2846df84d0f90b761bf427dd67be80694b69d@gmail.com>
## Code After:
[](https://travis-ci.org/youngar/Base9)
# Building Base9
## Grab the source
clone this repository using git:
```sh
git clone https://github.com/youngar/Base9
cd Base9
# Download sub-modules: googletest and omr
git submodule update --init
```
## Building
### Set up the build directory
Create a build directory and configure the build. Base9 and OMR use Cmake.
We suggest using the ninja generator.
```sh
# Inside the Base9 directory
mkdir build && cd build
cmake -G Ninja ..
```
### Build it!
```sh
# Inside the build directory
ninja -j2
```
## Testing
To run the base9 tests, run:
```sh
# Inside the build directory
ctest -V
```
## Benchmarking
To run the benchmark, use:
```
ninja bench
```
| [](https://travis-ci.org/youngar/Base9)
# Building Base9
## Grab the source
clone this repository using git:
```sh
git clone https://github.com/youngar/Base9
cd Base9
+ # Download sub-modules: googletest and omr
+ git submodule update --init
```
## Building
### Set up the build directory
Create a build directory and configure the build. Base9 and OMR use Cmake.
We suggest using the ninja generator.
```sh
# Inside the Base9 directory
mkdir build && cd build
cmake -G Ninja ..
```
### Build it!
```sh
# Inside the build directory
ninja -j2
```
## Testing
To run the base9 tests, run:
```sh
# Inside the build directory
ctest -V
```
## Benchmarking
To run the benchmark, use:
```
ninja bench
``` | 2 | 0.042553 | 2 | 0 |
5f2aa3f526826c150b0659e1a905dedc16e31e5e | .circleci/config.yml | .circleci/config.yml | version: 2
jobs:
build:
environment:
BASH_ENV: "~/.nix-profile/etc/profile.d/nix.sh"
machine: true
steps:
- checkout
- run: git config --global user.email "travis-ci@example.com"
- run: git config --global user.name "Travis-CI"
- run: ./try-reflex --command "exit 0"
- run: ./test -j 2
| version: 2
jobs:
build:
environment:
BASH_ENV: "~/.nix-profile/etc/profile.d/nix.sh"
machine: true
steps:
- checkout
- run: git config --global user.email "travis-ci@example.com"
- run: git config --global user.name "Travis-CI"
- run:
command: ./try-reflex --command "exit 0"
no_output_timeout: 3600
- run: ./test -j 2
| Increase no output timeout in circle ci | Increase no output timeout in circle ci
| YAML | bsd-3-clause | reflex-frp/reflex-platform,reflex-frp/reflex-platform,ryantrinkle/try-reflex,reflex-frp/reflex-platform | yaml | ## Code Before:
version: 2
jobs:
build:
environment:
BASH_ENV: "~/.nix-profile/etc/profile.d/nix.sh"
machine: true
steps:
- checkout
- run: git config --global user.email "travis-ci@example.com"
- run: git config --global user.name "Travis-CI"
- run: ./try-reflex --command "exit 0"
- run: ./test -j 2
## Instruction:
Increase no output timeout in circle ci
## Code After:
version: 2
jobs:
build:
environment:
BASH_ENV: "~/.nix-profile/etc/profile.d/nix.sh"
machine: true
steps:
- checkout
- run: git config --global user.email "travis-ci@example.com"
- run: git config --global user.name "Travis-CI"
- run:
command: ./try-reflex --command "exit 0"
no_output_timeout: 3600
- run: ./test -j 2
| version: 2
jobs:
build:
environment:
BASH_ENV: "~/.nix-profile/etc/profile.d/nix.sh"
machine: true
steps:
- checkout
- run: git config --global user.email "travis-ci@example.com"
- run: git config --global user.name "Travis-CI"
+ - run:
- - run: ./try-reflex --command "exit 0"
? - ^^
+ command: ./try-reflex --command "exit 0"
? ^^^^^^^^ +
+ no_output_timeout: 3600
- run: ./test -j 2 | 4 | 0.25 | 3 | 1 |
d477b7d42ca57b2d7935c9cdf1413963fc61c4a0 | Cargo.toml | Cargo.toml | [package]
name = "staticfile"
version = "0.3.1"
authors = ["Zach Pomerantz <zmp@umich.edu>", "Jonathan Reem <jonathan.reem@gmail.com>"]
description = "Static file serving for Iron."
repository = "https://github.com/iron/staticfile"
license = "MIT"
keywords = ["iron", "web", "http", "file"]
[features]
cache = ["filetime"]
[dependencies]
iron = "0.5"
mount = "0.3"
time = "0.1"
log = "0.3"
url = "1.1"
[dependencies.filetime]
version = "0.1"
optional = true
[dev-dependencies]
hyper = "0.10"
router = "0.5"
[dev-dependencies.iron-test]
git = "https://github.com/reem/iron-test.git"
rev = "2077c759be395ab08b1bbe841eced7067fac9d34"
| [package]
name = "staticfile"
version = "0.3.1"
authors = ["Zach Pomerantz <zmp@umich.edu>", "Jonathan Reem <jonathan.reem@gmail.com>"]
description = "Static file serving for Iron."
repository = "https://github.com/iron/staticfile"
license = "MIT"
keywords = ["iron", "web", "http", "file"]
[features]
cache = ["filetime"]
[dependencies]
iron = "0.5"
mount = "0.3"
time = "0.1"
log = "0.3"
url = "1.1"
[dependencies.filetime]
version = "0.1"
optional = true
[dev-dependencies]
hyper = "0.10"
router = "0.5"
[dev-dependencies.iron-test]
git = "https://github.com/reem/iron-test.git"
rev = "3eaec2339a79952bb2864e7bf4ab7f690dd4f875"
| Use latest head of iron-test | Use latest head of iron-test
| TOML | mit | iron/staticfile | toml | ## Code Before:
[package]
name = "staticfile"
version = "0.3.1"
authors = ["Zach Pomerantz <zmp@umich.edu>", "Jonathan Reem <jonathan.reem@gmail.com>"]
description = "Static file serving for Iron."
repository = "https://github.com/iron/staticfile"
license = "MIT"
keywords = ["iron", "web", "http", "file"]
[features]
cache = ["filetime"]
[dependencies]
iron = "0.5"
mount = "0.3"
time = "0.1"
log = "0.3"
url = "1.1"
[dependencies.filetime]
version = "0.1"
optional = true
[dev-dependencies]
hyper = "0.10"
router = "0.5"
[dev-dependencies.iron-test]
git = "https://github.com/reem/iron-test.git"
rev = "2077c759be395ab08b1bbe841eced7067fac9d34"
## Instruction:
Use latest head of iron-test
## Code After:
[package]
name = "staticfile"
version = "0.3.1"
authors = ["Zach Pomerantz <zmp@umich.edu>", "Jonathan Reem <jonathan.reem@gmail.com>"]
description = "Static file serving for Iron."
repository = "https://github.com/iron/staticfile"
license = "MIT"
keywords = ["iron", "web", "http", "file"]
[features]
cache = ["filetime"]
[dependencies]
iron = "0.5"
mount = "0.3"
time = "0.1"
log = "0.3"
url = "1.1"
[dependencies.filetime]
version = "0.1"
optional = true
[dev-dependencies]
hyper = "0.10"
router = "0.5"
[dev-dependencies.iron-test]
git = "https://github.com/reem/iron-test.git"
rev = "3eaec2339a79952bb2864e7bf4ab7f690dd4f875"
| [package]
name = "staticfile"
version = "0.3.1"
authors = ["Zach Pomerantz <zmp@umich.edu>", "Jonathan Reem <jonathan.reem@gmail.com>"]
description = "Static file serving for Iron."
repository = "https://github.com/iron/staticfile"
license = "MIT"
keywords = ["iron", "web", "http", "file"]
[features]
cache = ["filetime"]
[dependencies]
iron = "0.5"
mount = "0.3"
time = "0.1"
log = "0.3"
url = "1.1"
[dependencies.filetime]
version = "0.1"
optional = true
[dev-dependencies]
hyper = "0.10"
router = "0.5"
[dev-dependencies.iron-test]
git = "https://github.com/reem/iron-test.git"
- rev = "2077c759be395ab08b1bbe841eced7067fac9d34"
+ rev = "3eaec2339a79952bb2864e7bf4ab7f690dd4f875" | 2 | 0.064516 | 1 | 1 |
f0198ea69e42645740a56402b175c64ed0fba3cc | index.html | index.html | <html>
<!-- For now this is just for testing. -->
<head>
<link href="https://fonts.googleapis.com/css?family=MedievalSharp" rel="stylesheet" type="text/css">
<link rel="stylesheet" type="text/css" href="styles.css">
<title>A Song of Ice and Fire vs. The Accursed Kings</title>
</head>
<body>
<h1><i>A Song of Ice and Fire</i> vs. <i>The Accursed Kings</i></h1>
<div style="float:left;">
<svg id="canvas"/>
<br>
<a href="https://github.com/smpcole/asoiaf-vs-tac">Contribute on GitHub</a>
</div>
<div id="side-panel">
<div id="instructions">
<h2>Instructions</h2>
<ol>
<li>Click on a character from either series to see which characters in the other series they are connected to.</li>
<li>Mouse over the highlighted lines to see explanations of the connections.</li>
</ol>
</div>
</div>
<script src="chars.js"></script>
<script src="blurbs.js"></script>
<script src="d3/d3.min.js" charset="utf-8"></script>
<script src="main.js"></script>
</body>
</html>
| <html>
<!-- For now this is just for testing. -->
<head>
<link href="https://fonts.googleapis.com/css?family=MedievalSharp" rel="stylesheet" type="text/css">
<link rel="stylesheet" type="text/css" href="styles.css">
<title>A Song of Ice and Fire vs. The Accursed Kings</title>
</head>
<body>
<h1><i>A Song of Ice and Fire</i> vs. <i>The Accursed Kings</i></h1>
<div style="float:left;">
<svg id="canvas"/>
<br>
<a href="https://github.com/smpcole/asoiaf-vs-tac">Contribute on GitHub</a>
</div>
<div id="side-panel">
<div id="instructions">
<h2>Instructions</h2>
<ol>
<li>Click on a character from either series to see which characters in the other series they are connected to.</li>
<li>Mouse over the highlighted lines to see explanations of the connections.</li>
</ol>
</div>
<div id="selected-char"/>
<div id="connected-char"/>
<div id="connections"/>
</div>
<script src="chars.js"></script>
<script src="blurbs.js"></script>
<script src="d3/d3.min.js" charset="utf-8"></script>
<script src="main.js"></script>
</body>
</html>
| Add divs for selected character, connected character, and connections to side panel | Add divs for selected character, connected character, and connections to side panel
| HTML | mit | smpcole/asoiaf-vs-tac,smpcole/theoriginalgot,smpcole/theoriginalgot,smpcole/theoriginalgot,smpcole/theoriginalgot,smpcole/asoiaf-vs-tac,smpcole/asoiaf-vs-tac,smpcole/asoiaf-vs-tac | html | ## Code Before:
<html>
<!-- For now this is just for testing. -->
<head>
<link href="https://fonts.googleapis.com/css?family=MedievalSharp" rel="stylesheet" type="text/css">
<link rel="stylesheet" type="text/css" href="styles.css">
<title>A Song of Ice and Fire vs. The Accursed Kings</title>
</head>
<body>
<h1><i>A Song of Ice and Fire</i> vs. <i>The Accursed Kings</i></h1>
<div style="float:left;">
<svg id="canvas"/>
<br>
<a href="https://github.com/smpcole/asoiaf-vs-tac">Contribute on GitHub</a>
</div>
<div id="side-panel">
<div id="instructions">
<h2>Instructions</h2>
<ol>
<li>Click on a character from either series to see which characters in the other series they are connected to.</li>
<li>Mouse over the highlighted lines to see explanations of the connections.</li>
</ol>
</div>
</div>
<script src="chars.js"></script>
<script src="blurbs.js"></script>
<script src="d3/d3.min.js" charset="utf-8"></script>
<script src="main.js"></script>
</body>
</html>
## Instruction:
Add divs for selected character, connected character, and connections to side panel
## Code After:
<html>
<!-- For now this is just for testing. -->
<head>
<link href="https://fonts.googleapis.com/css?family=MedievalSharp" rel="stylesheet" type="text/css">
<link rel="stylesheet" type="text/css" href="styles.css">
<title>A Song of Ice and Fire vs. The Accursed Kings</title>
</head>
<body>
<h1><i>A Song of Ice and Fire</i> vs. <i>The Accursed Kings</i></h1>
<div style="float:left;">
<svg id="canvas"/>
<br>
<a href="https://github.com/smpcole/asoiaf-vs-tac">Contribute on GitHub</a>
</div>
<div id="side-panel">
<div id="instructions">
<h2>Instructions</h2>
<ol>
<li>Click on a character from either series to see which characters in the other series they are connected to.</li>
<li>Mouse over the highlighted lines to see explanations of the connections.</li>
</ol>
</div>
<div id="selected-char"/>
<div id="connected-char"/>
<div id="connections"/>
</div>
<script src="chars.js"></script>
<script src="blurbs.js"></script>
<script src="d3/d3.min.js" charset="utf-8"></script>
<script src="main.js"></script>
</body>
</html>
| <html>
<!-- For now this is just for testing. -->
<head>
<link href="https://fonts.googleapis.com/css?family=MedievalSharp" rel="stylesheet" type="text/css">
<link rel="stylesheet" type="text/css" href="styles.css">
<title>A Song of Ice and Fire vs. The Accursed Kings</title>
</head>
<body>
<h1><i>A Song of Ice and Fire</i> vs. <i>The Accursed Kings</i></h1>
<div style="float:left;">
<svg id="canvas"/>
<br>
<a href="https://github.com/smpcole/asoiaf-vs-tac">Contribute on GitHub</a>
</div>
<div id="side-panel">
<div id="instructions">
<h2>Instructions</h2>
<ol>
<li>Click on a character from either series to see which characters in the other series they are connected to.</li>
<li>Mouse over the highlighted lines to see explanations of the connections.</li>
</ol>
</div>
+ <div id="selected-char"/>
+ <div id="connected-char"/>
+ <div id="connections"/>
+
</div>
<script src="chars.js"></script>
<script src="blurbs.js"></script>
<script src="d3/d3.min.js" charset="utf-8"></script>
<script src="main.js"></script>
</body>
</html> | 4 | 0.088889 | 4 | 0 |
4320eecc294fa1233a2ad7b4cdec1e2dc1e83b37 | testing/test_simbad.py | testing/test_simbad.py | import pytest
import vcr
try:
from unittest import mock
except ImportError:
import mock
from k2catalogue.models import EPIC, create_session
from k2catalogue.simbad import Simbad
@pytest.fixture
def session():
return create_session()
@pytest.fixture
def epic(session):
return session.query(EPIC).filter(EPIC.epic_id == 201763507).first()
@pytest.fixture
def simbad(epic):
return Simbad(epic)
@pytest.fixture
def form_data(simbad):
return simbad.form_data(radius=5.)
@vcr.use_cassette('.cassettes/response.yml')
@pytest.fixture
def response(simbad):
return simbad.send_request()
def test_form_data(form_data):
assert form_data['Coord'] == '169.18 4.72'
def test_response(response):
assert response.status_code == 200
def test_open(simbad):
with mock.patch('k2catalogue.simbad.webbrowser.open') as mock_open:
simbad.open(radius=10)
url, = mock_open.call_args[0]
assert 'file://' in url and 'html' in url
| import pytest
import vcr
try:
from unittest import mock
except ImportError:
import mock
from k2catalogue.models import EPIC, create_session
from k2catalogue.simbad import Simbad
@pytest.fixture
def session():
return create_session()
@pytest.fixture
def epic(session):
return mock.Mock(ra=123.456, dec=-56.789)
@pytest.fixture
def simbad(epic):
return Simbad(epic)
@pytest.fixture
def form_data(simbad):
return simbad.form_data(radius=5.)
@vcr.use_cassette('.cassettes/response.yml')
@pytest.fixture
def response(simbad):
return simbad.send_request()
def test_form_data(form_data):
assert form_data['Coord'] == '123.46 -56.79'
def test_response(response):
assert response.status_code == 200
def test_open(simbad):
with mock.patch('k2catalogue.simbad.webbrowser.open') as mock_open:
simbad.open(radius=10)
url, = mock_open.call_args[0]
assert 'file://' in url and 'html' in url
| Remove real database during testing | Remove real database during testing
| Python | mit | mindriot101/k2catalogue | python | ## Code Before:
import pytest
import vcr
try:
from unittest import mock
except ImportError:
import mock
from k2catalogue.models import EPIC, create_session
from k2catalogue.simbad import Simbad
@pytest.fixture
def session():
return create_session()
@pytest.fixture
def epic(session):
return session.query(EPIC).filter(EPIC.epic_id == 201763507).first()
@pytest.fixture
def simbad(epic):
return Simbad(epic)
@pytest.fixture
def form_data(simbad):
return simbad.form_data(radius=5.)
@vcr.use_cassette('.cassettes/response.yml')
@pytest.fixture
def response(simbad):
return simbad.send_request()
def test_form_data(form_data):
assert form_data['Coord'] == '169.18 4.72'
def test_response(response):
assert response.status_code == 200
def test_open(simbad):
with mock.patch('k2catalogue.simbad.webbrowser.open') as mock_open:
simbad.open(radius=10)
url, = mock_open.call_args[0]
assert 'file://' in url and 'html' in url
## Instruction:
Remove real database during testing
## Code After:
import pytest
import vcr
try:
from unittest import mock
except ImportError:
import mock
from k2catalogue.models import EPIC, create_session
from k2catalogue.simbad import Simbad
@pytest.fixture
def session():
return create_session()
@pytest.fixture
def epic(session):
return mock.Mock(ra=123.456, dec=-56.789)
@pytest.fixture
def simbad(epic):
return Simbad(epic)
@pytest.fixture
def form_data(simbad):
return simbad.form_data(radius=5.)
@vcr.use_cassette('.cassettes/response.yml')
@pytest.fixture
def response(simbad):
return simbad.send_request()
def test_form_data(form_data):
assert form_data['Coord'] == '123.46 -56.79'
def test_response(response):
assert response.status_code == 200
def test_open(simbad):
with mock.patch('k2catalogue.simbad.webbrowser.open') as mock_open:
simbad.open(radius=10)
url, = mock_open.call_args[0]
assert 'file://' in url and 'html' in url
| import pytest
import vcr
try:
from unittest import mock
except ImportError:
import mock
from k2catalogue.models import EPIC, create_session
from k2catalogue.simbad import Simbad
@pytest.fixture
def session():
return create_session()
@pytest.fixture
def epic(session):
- return session.query(EPIC).filter(EPIC.epic_id == 201763507).first()
+ return mock.Mock(ra=123.456, dec=-56.789)
@pytest.fixture
def simbad(epic):
return Simbad(epic)
@pytest.fixture
def form_data(simbad):
return simbad.form_data(radius=5.)
@vcr.use_cassette('.cassettes/response.yml')
@pytest.fixture
def response(simbad):
return simbad.send_request()
def test_form_data(form_data):
- assert form_data['Coord'] == '169.18 4.72'
? ---- ^ ^
+ assert form_data['Coord'] == '123.46 -56.79'
? ++++ ^^^ ^
def test_response(response):
assert response.status_code == 200
def test_open(simbad):
with mock.patch('k2catalogue.simbad.webbrowser.open') as mock_open:
simbad.open(radius=10)
url, = mock_open.call_args[0]
assert 'file://' in url and 'html' in url | 4 | 0.08 | 2 | 2 |
c532fbe1246fcf2032b09a417b27a932f0bce774 | server/services/ads.spec.js | server/services/ads.spec.js | /* global describe, beforeEach, afterEach, it */
const knex_config = require('../../knexfile');
const knex = require('knex')(knex_config['test']);
const chai = require('chai');
const should = chai.should();
describe('Handle ads', function() {
beforeEach(function(done) {
knex.migrate.rollback()
.then(function() {
knex.migrate.latest()
.then(function() {
return knex.seed.run()
.then(function() {
done();
});
});
});
});
afterEach(function(done) {
knex.migrate.rollback()
.then(function() {
done();
});
});
it('should list ads in order', (done) => {
done();
})
});
| /* global describe, beforeEach, afterEach, it */
const chai = require('chai');
const should = chai.should();
const knex_config = require('../../knexfile');
const knex = require('knex')(knex_config['test']);
const util = require('../util')({ knex });
const service = require('./ads')({ knex, util });
describe('Handle ads', function() {
beforeEach(function(done) {
knex.migrate.rollback()
.then(function() {
knex.migrate.latest()
.then(function() {
return knex.seed.run()
.then(function() {
done();
});
});
});
});
afterEach(function(done) {
knex.migrate.rollback()
.then(function() {
done();
});
});
it('should list ads in order', (done) => {
service.listAds(false).then((ads) => {
ads.map(ad => ad.id).should.eql([2, 3, 1]);
done();
})
})
});
| Test that ads appear in date order latest first | Test that ads appear in date order latest first
| JavaScript | agpl-3.0 | Tradenomiliitto/tradenomiitti,Tradenomiliitto/tradenomiitti,Tradenomiliitto/tradenomiitti | javascript | ## Code Before:
/* global describe, beforeEach, afterEach, it */
const knex_config = require('../../knexfile');
const knex = require('knex')(knex_config['test']);
const chai = require('chai');
const should = chai.should();
describe('Handle ads', function() {
beforeEach(function(done) {
knex.migrate.rollback()
.then(function() {
knex.migrate.latest()
.then(function() {
return knex.seed.run()
.then(function() {
done();
});
});
});
});
afterEach(function(done) {
knex.migrate.rollback()
.then(function() {
done();
});
});
it('should list ads in order', (done) => {
done();
})
});
## Instruction:
Test that ads appear in date order latest first
## Code After:
/* global describe, beforeEach, afterEach, it */
const chai = require('chai');
const should = chai.should();
const knex_config = require('../../knexfile');
const knex = require('knex')(knex_config['test']);
const util = require('../util')({ knex });
const service = require('./ads')({ knex, util });
describe('Handle ads', function() {
beforeEach(function(done) {
knex.migrate.rollback()
.then(function() {
knex.migrate.latest()
.then(function() {
return knex.seed.run()
.then(function() {
done();
});
});
});
});
afterEach(function(done) {
knex.migrate.rollback()
.then(function() {
done();
});
});
it('should list ads in order', (done) => {
service.listAds(false).then((ads) => {
ads.map(ad => ad.id).should.eql([2, 3, 1]);
done();
})
})
});
| /* global describe, beforeEach, afterEach, it */
+
+ const chai = require('chai');
+ const should = chai.should();
+
const knex_config = require('../../knexfile');
const knex = require('knex')(knex_config['test']);
- const chai = require('chai');
- const should = chai.should();
+ const util = require('../util')({ knex });
+ const service = require('./ads')({ knex, util });
describe('Handle ads', function() {
beforeEach(function(done) {
knex.migrate.rollback()
.then(function() {
knex.migrate.latest()
.then(function() {
return knex.seed.run()
.then(function() {
done();
});
});
});
});
afterEach(function(done) {
knex.migrate.rollback()
.then(function() {
done();
});
});
it('should list ads in order', (done) => {
+ service.listAds(false).then((ads) => {
+ ads.map(ad => ad.id).should.eql([2, 3, 1]);
- done();
+ done();
? ++
+ })
})
}); | 13 | 0.382353 | 10 | 3 |
d733e5adcdeb761a08bda0f606446ec3f3f29d36 | app/views/careers/show.html.erb | app/views/careers/show.html.erb | <h2><%= @career.name %></h2>
<%= link_to 'Back', careers_path %>
<p><%= @career.description %></p>
<ul class="requirements">
<% @career.requirements.each do |requirement| %>
<li>
<h3><%= requirement.skill.name %></h3>
<p>Required level: <%= requirement.level %></p>
<p>For seniority: <%= requirement.seniority %></p>
<%= link_to 'Edit', edit_career_requirement_path(@career, requirement) %>
<%= link_to 'Destroy', [@career, requirement], method: :delete, data: { confirm: 'Are you sure?' } %>
</li>
<% end %>
<li class="content-center">
<%= link_to 'New requirement', new_career_requirement_path(@career) %>
</li>
</ul>
<%= link_to 'Edit', edit_career_path(@career) %> |
<%= link_to 'Destroy', career_path(@career), method: :delete, data: { confirm: 'Are you sure?' } %> |
| <h2><%= @career.name %></h2>
<%= link_to 'Panorama', panorama_career_path(@career) %> |
<%= link_to 'Edit', edit_career_path(@career) %> |
<%= link_to 'Destroy', career_path(@career), method: :delete, data: { confirm: 'Are you sure?' } %> |
<%= link_to 'Back', careers_path %>
<p><%= @career.description %></p>
<h3>Requirements</h3>
<ul class="requirements">
<% @career.requirements.each do |requirement| %>
<li>
<h3><%= requirement.skill.name %></h3>
<p>Required level: <%= requirement.level %></p>
<p>For seniority: <%= requirement.seniority %></p>
<%= link_to 'Edit', edit_career_requirement_path(@career, requirement) %>
<%= link_to 'Destroy', [@career, requirement], method: :delete, data: { confirm: 'Are you sure?' } %>
</li>
<% end %>
<li class="content-center">
<%= link_to 'New requirement', new_career_requirement_path(@career) %>
</li>
</ul>
| Move career management links to top | Move career management links to top
It makes them easy to spot. Also added a title for requiremennts.
| HTML+ERB | agpl-3.0 | Altoros/hacker-assessor,Altoros/hacker-assessor | html+erb | ## Code Before:
<h2><%= @career.name %></h2>
<%= link_to 'Back', careers_path %>
<p><%= @career.description %></p>
<ul class="requirements">
<% @career.requirements.each do |requirement| %>
<li>
<h3><%= requirement.skill.name %></h3>
<p>Required level: <%= requirement.level %></p>
<p>For seniority: <%= requirement.seniority %></p>
<%= link_to 'Edit', edit_career_requirement_path(@career, requirement) %>
<%= link_to 'Destroy', [@career, requirement], method: :delete, data: { confirm: 'Are you sure?' } %>
</li>
<% end %>
<li class="content-center">
<%= link_to 'New requirement', new_career_requirement_path(@career) %>
</li>
</ul>
<%= link_to 'Edit', edit_career_path(@career) %> |
<%= link_to 'Destroy', career_path(@career), method: :delete, data: { confirm: 'Are you sure?' } %> |
## Instruction:
Move career management links to top
It makes them easy to spot. Also added a title for requiremennts.
## Code After:
<h2><%= @career.name %></h2>
<%= link_to 'Panorama', panorama_career_path(@career) %> |
<%= link_to 'Edit', edit_career_path(@career) %> |
<%= link_to 'Destroy', career_path(@career), method: :delete, data: { confirm: 'Are you sure?' } %> |
<%= link_to 'Back', careers_path %>
<p><%= @career.description %></p>
<h3>Requirements</h3>
<ul class="requirements">
<% @career.requirements.each do |requirement| %>
<li>
<h3><%= requirement.skill.name %></h3>
<p>Required level: <%= requirement.level %></p>
<p>For seniority: <%= requirement.seniority %></p>
<%= link_to 'Edit', edit_career_requirement_path(@career, requirement) %>
<%= link_to 'Destroy', [@career, requirement], method: :delete, data: { confirm: 'Are you sure?' } %>
</li>
<% end %>
<li class="content-center">
<%= link_to 'New requirement', new_career_requirement_path(@career) %>
</li>
</ul>
| <h2><%= @career.name %></h2>
+ <%= link_to 'Panorama', panorama_career_path(@career) %> |
+ <%= link_to 'Edit', edit_career_path(@career) %> |
+ <%= link_to 'Destroy', career_path(@career), method: :delete, data: { confirm: 'Are you sure?' } %> |
<%= link_to 'Back', careers_path %>
<p><%= @career.description %></p>
+
+ <h3>Requirements</h3>
<ul class="requirements">
<% @career.requirements.each do |requirement| %>
<li>
<h3><%= requirement.skill.name %></h3>
<p>Required level: <%= requirement.level %></p>
<p>For seniority: <%= requirement.seniority %></p>
<%= link_to 'Edit', edit_career_requirement_path(@career, requirement) %>
<%= link_to 'Destroy', [@career, requirement], method: :delete, data: { confirm: 'Are you sure?' } %>
</li>
<% end %>
<li class="content-center">
<%= link_to 'New requirement', new_career_requirement_path(@career) %>
</li>
</ul>
- <%= link_to 'Edit', edit_career_path(@career) %> |
- <%= link_to 'Destroy', career_path(@career), method: :delete, data: { confirm: 'Are you sure?' } %> | | 7 | 0.304348 | 5 | 2 |
2bb24220fee480a926f4622065bc6a4b03fa8fae | app/assets/stylesheets/pagination_fixes.css | app/assets/stylesheets/pagination_fixes.css | .pagination li.current, .pagination li.gap {
float: left;
padding: 0 14px;
line-height: 34px;
text-decoration: none;
border: 1px solid #DDD;
border-left-width: 0;
}
.pagination li.current {
background: #eee;
}
.pagination li:first-child, .pagination li:first-child a {
border-left-width: 1px;
} | .pagination li.current, .pagination li.gap {
float: left;
padding: 0 12px;
line-height: 34px;
text-decoration: none;
border: 1px solid #DDD;
border-left-width: 0;
}
.pagination li.current {
background: #eee;
}
.pagination li a {
padding: 0 12px;
}
.pagination li:first-child, .pagination li:first-child a {
border-left-width: 1px;
} | Reduce pagination size so that it doesn't wrap | Reduce pagination size so that it doesn't wrap
Courtesy of @workwithgreat
| CSS | mit | alphagov/signonotron2,alphagov/signonotron2,alphagov/signonotron2,alphagov/signonotron2 | css | ## Code Before:
.pagination li.current, .pagination li.gap {
float: left;
padding: 0 14px;
line-height: 34px;
text-decoration: none;
border: 1px solid #DDD;
border-left-width: 0;
}
.pagination li.current {
background: #eee;
}
.pagination li:first-child, .pagination li:first-child a {
border-left-width: 1px;
}
## Instruction:
Reduce pagination size so that it doesn't wrap
Courtesy of @workwithgreat
## Code After:
.pagination li.current, .pagination li.gap {
float: left;
padding: 0 12px;
line-height: 34px;
text-decoration: none;
border: 1px solid #DDD;
border-left-width: 0;
}
.pagination li.current {
background: #eee;
}
.pagination li a {
padding: 0 12px;
}
.pagination li:first-child, .pagination li:first-child a {
border-left-width: 1px;
} | .pagination li.current, .pagination li.gap {
float: left;
- padding: 0 14px;
? ^
+ padding: 0 12px;
? ^
line-height: 34px;
text-decoration: none;
border: 1px solid #DDD;
border-left-width: 0;
}
.pagination li.current {
background: #eee;
}
+ .pagination li a {
+ padding: 0 12px;
+ }
.pagination li:first-child, .pagination li:first-child a {
border-left-width: 1px;
+
} | 6 | 0.428571 | 5 | 1 |
555a0afbb704b1650c0e3208e45f1c12697d9404 | README.md | README.md | Stringizer is a standalone String Utility Library
## Status
Under Development and Unstable.
| Stringizer is a standalone String Utility Library
[](https://travis-ci.org/jasonlam604/Stringizer)
## Status
Under Development and Unstable.
| Add Travis-CI Build Badge and trigger first build | Add Travis-CI Build Badge and trigger first build | Markdown | mit | jasonlam604/Stringizer | markdown | ## Code Before:
Stringizer is a standalone String Utility Library
## Status
Under Development and Unstable.
## Instruction:
Add Travis-CI Build Badge and trigger first build
## Code After:
Stringizer is a standalone String Utility Library
[](https://travis-ci.org/jasonlam604/Stringizer)
## Status
Under Development and Unstable.
| Stringizer is a standalone String Utility Library
+ [](https://travis-ci.org/jasonlam604/Stringizer)
## Status
Under Development and Unstable. | 1 | 0.166667 | 1 | 0 |
b627c237ec72312216e57be69fb724109b1b2fda | pkgs/tools/misc/slop/default.nix | pkgs/tools/misc/slop/default.nix | { stdenv, fetchFromGitHub, cmake
, glew, glm, mesa, libX11, libXext, libXrender, cppcheck, icu}:
stdenv.mkDerivation rec {
name = "slop-${version}";
version = "7.3.49";
src = fetchFromGitHub {
owner = "naelstrof";
repo = "slop";
rev = "v${version}";
sha256 = "0is3mh2d1jqgvv72v5x92w23yf26n8n384nbr1b6cn883aw8j7jz";
};
nativeBuildInputs = [ cmake ];
buildInputs = [ glew glm mesa libX11 libXext libXrender icu ]
++ stdenv.lib.optional doCheck cppcheck;
doCheck = false;
meta = with stdenv.lib; {
inherit (src.meta) homepage;
description = "Queries a selection from the user and prints to stdout";
platforms = stdenv.lib.platforms.all;
license = stdenv.lib.licenses.gpl3Plus;
maintainers = with maintainers; [ primeos mbakke ];
};
}
| { stdenv, fetchFromGitHub, cmake, pkgconfig
, glew, glm, mesa, libX11, libXext, libXrender, cppcheck, icu}:
stdenv.mkDerivation rec {
name = "slop-${version}";
version = "7.3.49";
src = fetchFromGitHub {
owner = "naelstrof";
repo = "slop";
rev = "v${version}";
sha256 = "0is3mh2d1jqgvv72v5x92w23yf26n8n384nbr1b6cn883aw8j7jz";
};
nativeBuildInputs = [ cmake pkgconfig ];
buildInputs = [ glew glm mesa libX11 libXext libXrender icu ]
++ stdenv.lib.optional doCheck cppcheck;
doCheck = false;
meta = with stdenv.lib; {
inherit (src.meta) homepage;
description = "Queries a selection from the user and prints to stdout";
platforms = stdenv.lib.platforms.all;
license = stdenv.lib.licenses.gpl3Plus;
maintainers = with maintainers; [ primeos mbakke ];
};
}
| Add pkgconfig as build dependency | slop: Add pkgconfig as build dependency
Fix: "Could NOT find PkgConfig (missing: PKG_CONFIG_EXECUTABLE)"
| Nix | mit | NixOS/nixpkgs,NixOS/nixpkgs,SymbiFlow/nixpkgs,NixOS/nixpkgs,SymbiFlow/nixpkgs,SymbiFlow/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,SymbiFlow/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,SymbiFlow/nixpkgs,SymbiFlow/nixpkgs,SymbiFlow/nixpkgs,SymbiFlow/nixpkgs,SymbiFlow/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,SymbiFlow/nixpkgs,SymbiFlow/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,SymbiFlow/nixpkgs,SymbiFlow/nixpkgs,NixOS/nixpkgs | nix | ## Code Before:
{ stdenv, fetchFromGitHub, cmake
, glew, glm, mesa, libX11, libXext, libXrender, cppcheck, icu}:
stdenv.mkDerivation rec {
name = "slop-${version}";
version = "7.3.49";
src = fetchFromGitHub {
owner = "naelstrof";
repo = "slop";
rev = "v${version}";
sha256 = "0is3mh2d1jqgvv72v5x92w23yf26n8n384nbr1b6cn883aw8j7jz";
};
nativeBuildInputs = [ cmake ];
buildInputs = [ glew glm mesa libX11 libXext libXrender icu ]
++ stdenv.lib.optional doCheck cppcheck;
doCheck = false;
meta = with stdenv.lib; {
inherit (src.meta) homepage;
description = "Queries a selection from the user and prints to stdout";
platforms = stdenv.lib.platforms.all;
license = stdenv.lib.licenses.gpl3Plus;
maintainers = with maintainers; [ primeos mbakke ];
};
}
## Instruction:
slop: Add pkgconfig as build dependency
Fix: "Could NOT find PkgConfig (missing: PKG_CONFIG_EXECUTABLE)"
## Code After:
{ stdenv, fetchFromGitHub, cmake, pkgconfig
, glew, glm, mesa, libX11, libXext, libXrender, cppcheck, icu}:
stdenv.mkDerivation rec {
name = "slop-${version}";
version = "7.3.49";
src = fetchFromGitHub {
owner = "naelstrof";
repo = "slop";
rev = "v${version}";
sha256 = "0is3mh2d1jqgvv72v5x92w23yf26n8n384nbr1b6cn883aw8j7jz";
};
nativeBuildInputs = [ cmake pkgconfig ];
buildInputs = [ glew glm mesa libX11 libXext libXrender icu ]
++ stdenv.lib.optional doCheck cppcheck;
doCheck = false;
meta = with stdenv.lib; {
inherit (src.meta) homepage;
description = "Queries a selection from the user and prints to stdout";
platforms = stdenv.lib.platforms.all;
license = stdenv.lib.licenses.gpl3Plus;
maintainers = with maintainers; [ primeos mbakke ];
};
}
| - { stdenv, fetchFromGitHub, cmake
+ { stdenv, fetchFromGitHub, cmake, pkgconfig
? +++++++++++
, glew, glm, mesa, libX11, libXext, libXrender, cppcheck, icu}:
stdenv.mkDerivation rec {
name = "slop-${version}";
version = "7.3.49";
src = fetchFromGitHub {
owner = "naelstrof";
repo = "slop";
rev = "v${version}";
sha256 = "0is3mh2d1jqgvv72v5x92w23yf26n8n384nbr1b6cn883aw8j7jz";
};
- nativeBuildInputs = [ cmake ];
+ nativeBuildInputs = [ cmake pkgconfig ];
? ++++++++++
buildInputs = [ glew glm mesa libX11 libXext libXrender icu ]
++ stdenv.lib.optional doCheck cppcheck;
doCheck = false;
meta = with stdenv.lib; {
inherit (src.meta) homepage;
description = "Queries a selection from the user and prints to stdout";
platforms = stdenv.lib.platforms.all;
license = stdenv.lib.licenses.gpl3Plus;
maintainers = with maintainers; [ primeos mbakke ];
};
} | 4 | 0.142857 | 2 | 2 |
dbc6d619db67c2063064229310b1d28258bf5c7d | lib/cloudapp_api.rb | lib/cloudapp_api.rb | require "httparty"
require "yaml" unless defined?(YAML)
YAML::ENGINE.yamler = "syck" if defined?(YAML::ENGINE)
["base", "drop", "account", "gift_card", "client", "multipart", "httparty", "core_ext", "response_error"].each do |inc|
require File.join(File.dirname(__FILE__), "cloudapp", inc)
end
# A simple Ruby wrapper for the CloudApp API. Uses HTTParty and provides
# two alternative interfaces for interracting with the API.
# An ActiveResource-like interface is provided alongside a simple client interface.
module CloudApp
# Version number
VERSION = "0.3.1"
# Sets the authentication credentials in a class variable
#
# @param [String] email cl.ly username
# @param [String] password cl.ly password
# @return [Hash] authentication credentials
def CloudApp.authenticate(email, password)
Base.authenticate(email, password)
end
end
| require "httparty"
require "yaml" unless defined?(YAML)
YAML::ENGINE.yamler = "syck" if defined?(YAML::ENGINE) && RUBY_VERSION < "2.0.0"
["base", "drop", "account", "gift_card", "client", "multipart", "httparty", "core_ext", "response_error"].each do |inc|
require File.join(File.dirname(__FILE__), "cloudapp", inc)
end
# A simple Ruby wrapper for the CloudApp API. Uses HTTParty and provides
# two alternative interfaces for interracting with the API.
# An ActiveResource-like interface is provided alongside a simple client interface.
module CloudApp
# Version number
VERSION = "0.3.1"
# Sets the authentication credentials in a class variable
#
# @param [String] email cl.ly username
# @param [String] password cl.ly password
# @return [Hash] authentication credentials
def CloudApp.authenticate(email, password)
Base.authenticate(email, password)
end
end
| Use syck only on Ruby < 2.0 | Use syck only on Ruby < 2.0
Inspired by https://github.com/tamc/Slogger/commit/916e3e35598c09991327386256113c9340243a9a
| Ruby | mit | aaronrussell/cloudapp_api,mecampbellsoup/cloudapp_api | ruby | ## Code Before:
require "httparty"
require "yaml" unless defined?(YAML)
YAML::ENGINE.yamler = "syck" if defined?(YAML::ENGINE)
["base", "drop", "account", "gift_card", "client", "multipart", "httparty", "core_ext", "response_error"].each do |inc|
require File.join(File.dirname(__FILE__), "cloudapp", inc)
end
# A simple Ruby wrapper for the CloudApp API. Uses HTTParty and provides
# two alternative interfaces for interracting with the API.
# An ActiveResource-like interface is provided alongside a simple client interface.
module CloudApp
# Version number
VERSION = "0.3.1"
# Sets the authentication credentials in a class variable
#
# @param [String] email cl.ly username
# @param [String] password cl.ly password
# @return [Hash] authentication credentials
def CloudApp.authenticate(email, password)
Base.authenticate(email, password)
end
end
## Instruction:
Use syck only on Ruby < 2.0
Inspired by https://github.com/tamc/Slogger/commit/916e3e35598c09991327386256113c9340243a9a
## Code After:
require "httparty"
require "yaml" unless defined?(YAML)
YAML::ENGINE.yamler = "syck" if defined?(YAML::ENGINE) && RUBY_VERSION < "2.0.0"
["base", "drop", "account", "gift_card", "client", "multipart", "httparty", "core_ext", "response_error"].each do |inc|
require File.join(File.dirname(__FILE__), "cloudapp", inc)
end
# A simple Ruby wrapper for the CloudApp API. Uses HTTParty and provides
# two alternative interfaces for interracting with the API.
# An ActiveResource-like interface is provided alongside a simple client interface.
module CloudApp
# Version number
VERSION = "0.3.1"
# Sets the authentication credentials in a class variable
#
# @param [String] email cl.ly username
# @param [String] password cl.ly password
# @return [Hash] authentication credentials
def CloudApp.authenticate(email, password)
Base.authenticate(email, password)
end
end
| require "httparty"
require "yaml" unless defined?(YAML)
- YAML::ENGINE.yamler = "syck" if defined?(YAML::ENGINE)
+ YAML::ENGINE.yamler = "syck" if defined?(YAML::ENGINE) && RUBY_VERSION < "2.0.0"
? ++++++++++++++++++++++++++
["base", "drop", "account", "gift_card", "client", "multipart", "httparty", "core_ext", "response_error"].each do |inc|
require File.join(File.dirname(__FILE__), "cloudapp", inc)
end
# A simple Ruby wrapper for the CloudApp API. Uses HTTParty and provides
# two alternative interfaces for interracting with the API.
# An ActiveResource-like interface is provided alongside a simple client interface.
module CloudApp
# Version number
VERSION = "0.3.1"
# Sets the authentication credentials in a class variable
#
# @param [String] email cl.ly username
# @param [String] password cl.ly password
# @return [Hash] authentication credentials
def CloudApp.authenticate(email, password)
Base.authenticate(email, password)
end
end | 2 | 0.076923 | 1 | 1 |
3ada75821b7b2a76d2e92948ac4fe823a024e538 | requirements-dev.txt | requirements-dev.txt | coverage==3.7.1
mock>=1.0.1
pytest==2.7.2
pytest-cov==2.1.0
| coverage==3.7.1
flake8==3.5.0
mock>=1.0.1
pytest==2.7.2
pytest-cov==2.1.0
| Add flake8 to dev requirements | Add flake8 to dev requirements
Signed-off-by: Joffrey F <2e95f49799afcec0080c0aeb8813776d949e0768@docker.com>
| Text | apache-2.0 | jrabbit/compose,shin-/compose,dnephin/compose,vdemeester/compose,shin-/compose,dnephin/compose,funkyfuture/docker-compose,thaJeztah/compose,funkyfuture/docker-compose,thaJeztah/compose,jrabbit/compose,vdemeester/compose | text | ## Code Before:
coverage==3.7.1
mock>=1.0.1
pytest==2.7.2
pytest-cov==2.1.0
## Instruction:
Add flake8 to dev requirements
Signed-off-by: Joffrey F <2e95f49799afcec0080c0aeb8813776d949e0768@docker.com>
## Code After:
coverage==3.7.1
flake8==3.5.0
mock>=1.0.1
pytest==2.7.2
pytest-cov==2.1.0
| coverage==3.7.1
+ flake8==3.5.0
mock>=1.0.1
pytest==2.7.2
pytest-cov==2.1.0 | 1 | 0.25 | 1 | 0 |
23950c5be105ed8bde62e6401dce808795beff0d | lib/commander/help_formatters/terminal/command_help.erb | lib/commander/help_formatters/terminal/command_help.erb |
<%= $terminal.color "NAME", :bold %>:
<%= @name %>
<% if @syntax -%>
<%= $terminal.color "SYNOPSIS", :bold %>:
<%= @syntax -%>
<% end -%>
<%= $terminal.color "DESCRIPTION", :bold %>:
<%= Commander::HelpFormatter.indent 4, (@description || @summary || 'No description.') -%>
<% unless @examples.empty? -%>
<%= $terminal.color "EXAMPLES", :bold %>:
<% for description, command in @examples -%>
# <%= description %>
<%= command %>
<% end -%>
<% end -%>
<% unless @options.empty? -%>
<%= $terminal.color "OPTIONS", :bold %>:
<% for option in @options -%>
<%= option[:switches].join ', ' %>
<%= option[:description] %>
<% end -%>
<% end -%>
|
<%= $terminal.color "NAME", :bold %>:
<%= @name %>
<% if @syntax -%>
<%= $terminal.color "SYNOPSIS", :bold %>:
<%= @syntax -%>
<% end -%>
<%= $terminal.color "DESCRIPTION", :bold %>:
<%= Commander::HelpFormatter.indent 4, (@description || @summary || 'No description.') -%>
<% unless @examples.empty? -%>
<%= $terminal.color "EXAMPLES", :bold %>:
<% for description, command in @examples -%>
# <%= description %>
<%= command %>
<% end -%>
<% end -%>
<% unless @options.empty? -%>
<%= $terminal.color "OPTIONS", :bold %>:
<% for option in @options -%>
<%= option[:switches].join ', ' %>
<%= Commander::HelpFormatter.indent 8, option[:description] %>
<% end -%>
<% end -%>
| Indent opt. description the same way as program and command description | Indent opt. description the same way as program and command description
| HTML+ERB | mit | commander-rb/commander,fastlane/commander,commander-rb/commander,fastlane/commander | html+erb | ## Code Before:
<%= $terminal.color "NAME", :bold %>:
<%= @name %>
<% if @syntax -%>
<%= $terminal.color "SYNOPSIS", :bold %>:
<%= @syntax -%>
<% end -%>
<%= $terminal.color "DESCRIPTION", :bold %>:
<%= Commander::HelpFormatter.indent 4, (@description || @summary || 'No description.') -%>
<% unless @examples.empty? -%>
<%= $terminal.color "EXAMPLES", :bold %>:
<% for description, command in @examples -%>
# <%= description %>
<%= command %>
<% end -%>
<% end -%>
<% unless @options.empty? -%>
<%= $terminal.color "OPTIONS", :bold %>:
<% for option in @options -%>
<%= option[:switches].join ', ' %>
<%= option[:description] %>
<% end -%>
<% end -%>
## Instruction:
Indent opt. description the same way as program and command description
## Code After:
<%= $terminal.color "NAME", :bold %>:
<%= @name %>
<% if @syntax -%>
<%= $terminal.color "SYNOPSIS", :bold %>:
<%= @syntax -%>
<% end -%>
<%= $terminal.color "DESCRIPTION", :bold %>:
<%= Commander::HelpFormatter.indent 4, (@description || @summary || 'No description.') -%>
<% unless @examples.empty? -%>
<%= $terminal.color "EXAMPLES", :bold %>:
<% for description, command in @examples -%>
# <%= description %>
<%= command %>
<% end -%>
<% end -%>
<% unless @options.empty? -%>
<%= $terminal.color "OPTIONS", :bold %>:
<% for option in @options -%>
<%= option[:switches].join ', ' %>
<%= Commander::HelpFormatter.indent 8, option[:description] %>
<% end -%>
<% end -%>
|
<%= $terminal.color "NAME", :bold %>:
<%= @name %>
<% if @syntax -%>
<%= $terminal.color "SYNOPSIS", :bold %>:
<%= @syntax -%>
<% end -%>
<%= $terminal.color "DESCRIPTION", :bold %>:
<%= Commander::HelpFormatter.indent 4, (@description || @summary || 'No description.') -%>
<% unless @examples.empty? -%>
<%= $terminal.color "EXAMPLES", :bold %>:
<% for description, command in @examples -%>
# <%= description %>
<%= command %>
<% end -%>
<% end -%>
<% unless @options.empty? -%>
<%= $terminal.color "OPTIONS", :bold %>:
<% for option in @options -%>
<%= option[:switches].join ', ' %>
- <%= option[:description] %>
+ <%= Commander::HelpFormatter.indent 8, option[:description] %>
<% end -%>
<% end -%>
| 2 | 0.057143 | 1 | 1 |
a1bc59c8825d82836aa4ab2e3b4affe105a364c9 | src/Recurrence/RecurrenceInterface.php | src/Recurrence/RecurrenceInterface.php | <?php
namespace Plummer\Calendarful\Recurrence;
interface RecurrenceInterface
{
public function getLabel();
public function getLimit();
public function generateOccurrences(Array $events, $fromDate, $toDate, $limit = null);
}
| <?php
namespace Plummer\Calendarful\Recurrence;
interface RecurrenceInterface
{
public function getLabel();
public function getLimit();
public function generateOccurrences(Array $events, \DateTime $fromDate, \DateTime $toDate, $limit = null);
}
| Add DateTime type hints for date parameters in method signature. | Add DateTime type hints for date parameters in method signature.
| PHP | mit | benplummer/calendarful | php | ## Code Before:
<?php
namespace Plummer\Calendarful\Recurrence;
interface RecurrenceInterface
{
public function getLabel();
public function getLimit();
public function generateOccurrences(Array $events, $fromDate, $toDate, $limit = null);
}
## Instruction:
Add DateTime type hints for date parameters in method signature.
## Code After:
<?php
namespace Plummer\Calendarful\Recurrence;
interface RecurrenceInterface
{
public function getLabel();
public function getLimit();
public function generateOccurrences(Array $events, \DateTime $fromDate, \DateTime $toDate, $limit = null);
}
| <?php
namespace Plummer\Calendarful\Recurrence;
interface RecurrenceInterface
{
public function getLabel();
public function getLimit();
- public function generateOccurrences(Array $events, $fromDate, $toDate, $limit = null);
+ public function generateOccurrences(Array $events, \DateTime $fromDate, \DateTime $toDate, $limit = null);
? ++++++++++ ++++++++++
} | 2 | 0.166667 | 1 | 1 |
c4f1925972a3fbdc2f7f9c8c5b1db784af7bb505 | README.md | README.md |
This library is no longer maintained by the Authy team. If you're interested in becoming the maintainer, we will gladly work with you to unarchive the project and re-open it for contributions.
For the legacy README, see [this document](README-legacy.md).
|
This library is no longer maintained by the Authy team. If you're interested in becoming the maintainer, we will gladly work with you to unarchive the project and re-open it for contributions. Contact help@twilio.com if interested!
For the legacy README, see [this document](README-legacy.md).
| Add contact info for potential maintainers | Add contact info for potential maintainers
| Markdown | mit | authy/authy-ssh,authy/authy-ssh | markdown | ## Code Before:
This library is no longer maintained by the Authy team. If you're interested in becoming the maintainer, we will gladly work with you to unarchive the project and re-open it for contributions.
For the legacy README, see [this document](README-legacy.md).
## Instruction:
Add contact info for potential maintainers
## Code After:
This library is no longer maintained by the Authy team. If you're interested in becoming the maintainer, we will gladly work with you to unarchive the project and re-open it for contributions. Contact help@twilio.com if interested!
For the legacy README, see [this document](README-legacy.md).
|
- This library is no longer maintained by the Authy team. If you're interested in becoming the maintainer, we will gladly work with you to unarchive the project and re-open it for contributions.
+ This library is no longer maintained by the Authy team. If you're interested in becoming the maintainer, we will gladly work with you to unarchive the project and re-open it for contributions. Contact help@twilio.com if interested!
? +++++++++++++++++++++++++++++++++++++++
For the legacy README, see [this document](README-legacy.md). | 2 | 0.5 | 1 | 1 |
0b79ce4530ac5b3a98883e06ba1bf0e2434aa598 | js/myscript.js | js/myscript.js | // window.checkAdLoad = function(){
// var checkAdLoadfunction = function(){
// var msg = "Ad unit is not present";
// if($("._teraAdContainer")){
// console.log("Ad unit is present")
// msg = "Ad unit is present";
// }
// return msg;
// }
// checkAdLoadfunction()
// console.log(checkAdLoadfunction);
// var validateAd = new CustomEvent("trackAdLoad", { "detail": checkAdLoadfunction});
// window.dispatchEvent(validateAd);
// }
t$(document).ready(function(){
console.log("Checking the AdInstances.");
var checkAdLoadfunction = function(){
if(t$("._teraAdContainer")){
console.log("Ad unit is present");
t$('._teraAdContainer').mouseover(function(){
t$(this).focus();
console.log('Hoverred mouseover tera ad unit')
});
}
}
checkAdLoadfunction();
});
| // window.checkAdLoad = function(){
// var checkAdLoadfunction = function(){
// var msg = "Ad unit is not present";
// if($("._teraAdContainer")){
// console.log("Ad unit is present")
// msg = "Ad unit is present";
// }
// return msg;
// }
// checkAdLoadfunction()
// console.log(checkAdLoadfunction);
// var validateAd = new CustomEvent("trackAdLoad", { "detail": checkAdLoadfunction});
// window.dispatchEvent(validateAd);
// }
t$(document).ready(function(){
console.log("Checking the AdInstances.");
var checkAdLoadfunction = function(){
if(t$("._abmMainAdContainer")){
console.log("Ad unit is present");
t$('._teraAdContainer').mouseover(function(){
t$(this).find('._abmAdContainer').text("Mouse over tera ad unit")
console.log('Hoverred mouse over tera ad unit')
});
}
}
checkAdLoadfunction();
});
| Add changes to the js file | Add changes to the js file
| JavaScript | mit | hoverr/hoverr.github.io,hoverr/hoverr.github.io | javascript | ## Code Before:
// window.checkAdLoad = function(){
// var checkAdLoadfunction = function(){
// var msg = "Ad unit is not present";
// if($("._teraAdContainer")){
// console.log("Ad unit is present")
// msg = "Ad unit is present";
// }
// return msg;
// }
// checkAdLoadfunction()
// console.log(checkAdLoadfunction);
// var validateAd = new CustomEvent("trackAdLoad", { "detail": checkAdLoadfunction});
// window.dispatchEvent(validateAd);
// }
t$(document).ready(function(){
console.log("Checking the AdInstances.");
var checkAdLoadfunction = function(){
if(t$("._teraAdContainer")){
console.log("Ad unit is present");
t$('._teraAdContainer').mouseover(function(){
t$(this).focus();
console.log('Hoverred mouseover tera ad unit')
});
}
}
checkAdLoadfunction();
});
## Instruction:
Add changes to the js file
## Code After:
// window.checkAdLoad = function(){
// var checkAdLoadfunction = function(){
// var msg = "Ad unit is not present";
// if($("._teraAdContainer")){
// console.log("Ad unit is present")
// msg = "Ad unit is present";
// }
// return msg;
// }
// checkAdLoadfunction()
// console.log(checkAdLoadfunction);
// var validateAd = new CustomEvent("trackAdLoad", { "detail": checkAdLoadfunction});
// window.dispatchEvent(validateAd);
// }
t$(document).ready(function(){
console.log("Checking the AdInstances.");
var checkAdLoadfunction = function(){
if(t$("._abmMainAdContainer")){
console.log("Ad unit is present");
t$('._teraAdContainer').mouseover(function(){
t$(this).find('._abmAdContainer').text("Mouse over tera ad unit")
console.log('Hoverred mouse over tera ad unit')
});
}
}
checkAdLoadfunction();
});
| // window.checkAdLoad = function(){
// var checkAdLoadfunction = function(){
// var msg = "Ad unit is not present";
// if($("._teraAdContainer")){
// console.log("Ad unit is present")
// msg = "Ad unit is present";
// }
// return msg;
// }
// checkAdLoadfunction()
// console.log(checkAdLoadfunction);
// var validateAd = new CustomEvent("trackAdLoad", { "detail": checkAdLoadfunction});
// window.dispatchEvent(validateAd);
// }
t$(document).ready(function(){
console.log("Checking the AdInstances.");
var checkAdLoadfunction = function(){
- if(t$("._teraAdContainer")){
? ---
+ if(t$("._abmMainAdContainer")){
? ++++++
console.log("Ad unit is present");
t$('._teraAdContainer').mouseover(function(){
- t$(this).focus();
+ t$(this).find('._abmAdContainer').text("Mouse over tera ad unit")
- console.log('Hoverred mouseover tera ad unit')
+ console.log('Hoverred mouse over tera ad unit')
? +
});
}
}
checkAdLoadfunction();
}); | 6 | 0.214286 | 3 | 3 |
501f0515412c128f1797821702e8b60d65da13c8 | _news/announcement_9.md | _news/announcement_9.md | ---
layout: post
date: 2022-03-10
inline: true
---
Paper accepted at the [MLHRC Workshop](https://sites.google.com/view/mlhrc-hri-2022/home) at [HRI 2022](https://humanrobotinteraction.org/2022/).
| ---
layout: post
date: 2022-03-10
inline: true
---
[Paper](https://arxiv.org/pdf/2204.07631.pdf) accepted at the [MLHRC Workshop](https://sites.google.com/view/mlhrc-hri-2022/home) at [HRI 2022](https://humanrobotinteraction.org/2022/).
| Add link to paper in announcement | Add link to paper in announcement | Markdown | mit | AbhineetJain/abhineetjain.github.io,AbhineetJain/abhineetjain.github.io,AbhineetJain/abhineetjain.github.io,AbhineetJain/abhineetjain.github.io | markdown | ## Code Before:
---
layout: post
date: 2022-03-10
inline: true
---
Paper accepted at the [MLHRC Workshop](https://sites.google.com/view/mlhrc-hri-2022/home) at [HRI 2022](https://humanrobotinteraction.org/2022/).
## Instruction:
Add link to paper in announcement
## Code After:
---
layout: post
date: 2022-03-10
inline: true
---
[Paper](https://arxiv.org/pdf/2204.07631.pdf) accepted at the [MLHRC Workshop](https://sites.google.com/view/mlhrc-hri-2022/home) at [HRI 2022](https://humanrobotinteraction.org/2022/).
| ---
layout: post
date: 2022-03-10
inline: true
---
- Paper accepted at the [MLHRC Workshop](https://sites.google.com/view/mlhrc-hri-2022/home) at [HRI 2022](https://humanrobotinteraction.org/2022/).
+ [Paper](https://arxiv.org/pdf/2204.07631.pdf) accepted at the [MLHRC Workshop](https://sites.google.com/view/mlhrc-hri-2022/home) at [HRI 2022](https://humanrobotinteraction.org/2022/).
? + +++++++++++++++++++++++++++++++++++++++
| 2 | 0.285714 | 1 | 1 |
f70c0c057372d3062a88d47d27ce648bda6e5861 | src/greuh_liberation.Default/scripts/server/base/huron_manager.sqf | src/greuh_liberation.Default/scripts/server/base/huron_manager.sqf | waitUntil { !isNil "GRLIB_all_fobs" };
waitUntil { !isNil "save_is_loaded" };
firstloop = true;
HELO_TRANSPORT = objNull;
_savedhuron = objNull;
while { true } do {
{
if ( typeof _x == huron_typename ) then {
_savedhuron = _x;
};
} foreach vehicles;
if ( firstloop && !isNull _savedhuron ) then {
HELO_TRANSPORT = _savedhuron;
} else {
HELO_TRANSPORT = huron_typename createVehicle ( getpos huronspawn );
HELO_TRANSPORT setpos ( getpos huronspawn );
HELO_TRANSPORT setDir 0;
};
firstloop = false;
publicVariable "HELO_TRANSPORT";
clearWeaponCargoGlobal HELO_TRANSPORT;
clearMagazineCargoGlobal HELO_TRANSPORT;
clearItemCargoGlobal HELO_TRANSPORT;
clearBackpackCargoGlobal HELO_TRANSPORT;
if ( alive HELO_TRANSPORT ) then {
waitUntil {sleep 1;!alive HELO_TRANSPORT;};
sleep 15;
};
};
| waitUntil { !isNil "GRLIB_all_fobs" };
waitUntil { !isNil "save_is_loaded" };
firstloop = true;
HELO_TRANSPORT = objNull;
_savedhuron = objNull;
while { true } do {
// Check if the Helicopter is loaded from a save.
{if ( typeof _x == huron_typename ) then {_savedhuron = _x;};} foreach vehicles;
if ( firstloop && !isNull _savedhuron ) then {
HELO_TRANSPORT = _savedhuron;
} else {
HELO_TRANSPORT = huron_typename createVehicle ( getpos huronspawn );
HELO_TRANSPORT setpos ( getpos huronspawn );
HELO_TRANSPORT setDir 0;
};
firstloop = false;
publicVariable "HELO_TRANSPORT";
clearWeaponCargoGlobal HELO_TRANSPORT;
clearMagazineCargoGlobal HELO_TRANSPORT;
clearItemCargoGlobal HELO_TRANSPORT;
clearBackpackCargoGlobal HELO_TRANSPORT;
if ( alive HELO_TRANSPORT ) then {
waitUntil {sleep 1;!alive HELO_TRANSPORT;};
sleep 15;
deleteVehicle HELO_TRANSPORT;
};
};
| Remove Heli Wreck on respawn (Hopefully) | Remove Heli Wreck on respawn (Hopefully)
| SQF | mit | fparma/liberation,fparma/liberation,fparma/liberation | sqf | ## Code Before:
waitUntil { !isNil "GRLIB_all_fobs" };
waitUntil { !isNil "save_is_loaded" };
firstloop = true;
HELO_TRANSPORT = objNull;
_savedhuron = objNull;
while { true } do {
{
if ( typeof _x == huron_typename ) then {
_savedhuron = _x;
};
} foreach vehicles;
if ( firstloop && !isNull _savedhuron ) then {
HELO_TRANSPORT = _savedhuron;
} else {
HELO_TRANSPORT = huron_typename createVehicle ( getpos huronspawn );
HELO_TRANSPORT setpos ( getpos huronspawn );
HELO_TRANSPORT setDir 0;
};
firstloop = false;
publicVariable "HELO_TRANSPORT";
clearWeaponCargoGlobal HELO_TRANSPORT;
clearMagazineCargoGlobal HELO_TRANSPORT;
clearItemCargoGlobal HELO_TRANSPORT;
clearBackpackCargoGlobal HELO_TRANSPORT;
if ( alive HELO_TRANSPORT ) then {
waitUntil {sleep 1;!alive HELO_TRANSPORT;};
sleep 15;
};
};
## Instruction:
Remove Heli Wreck on respawn (Hopefully)
## Code After:
waitUntil { !isNil "GRLIB_all_fobs" };
waitUntil { !isNil "save_is_loaded" };
firstloop = true;
HELO_TRANSPORT = objNull;
_savedhuron = objNull;
while { true } do {
// Check if the Helicopter is loaded from a save.
{if ( typeof _x == huron_typename ) then {_savedhuron = _x;};} foreach vehicles;
if ( firstloop && !isNull _savedhuron ) then {
HELO_TRANSPORT = _savedhuron;
} else {
HELO_TRANSPORT = huron_typename createVehicle ( getpos huronspawn );
HELO_TRANSPORT setpos ( getpos huronspawn );
HELO_TRANSPORT setDir 0;
};
firstloop = false;
publicVariable "HELO_TRANSPORT";
clearWeaponCargoGlobal HELO_TRANSPORT;
clearMagazineCargoGlobal HELO_TRANSPORT;
clearItemCargoGlobal HELO_TRANSPORT;
clearBackpackCargoGlobal HELO_TRANSPORT;
if ( alive HELO_TRANSPORT ) then {
waitUntil {sleep 1;!alive HELO_TRANSPORT;};
sleep 15;
deleteVehicle HELO_TRANSPORT;
};
};
| waitUntil { !isNil "GRLIB_all_fobs" };
waitUntil { !isNil "save_is_loaded" };
firstloop = true;
HELO_TRANSPORT = objNull;
_savedhuron = objNull;
while { true } do {
+ // Check if the Helicopter is loaded from a save.
+ {if ( typeof _x == huron_typename ) then {_savedhuron = _x;};} foreach vehicles;
- {
- if ( typeof _x == huron_typename ) then {
- _savedhuron = _x;
- };
- } foreach vehicles;
if ( firstloop && !isNull _savedhuron ) then {
HELO_TRANSPORT = _savedhuron;
} else {
HELO_TRANSPORT = huron_typename createVehicle ( getpos huronspawn );
HELO_TRANSPORT setpos ( getpos huronspawn );
HELO_TRANSPORT setDir 0;
};
firstloop = false;
publicVariable "HELO_TRANSPORT";
clearWeaponCargoGlobal HELO_TRANSPORT;
clearMagazineCargoGlobal HELO_TRANSPORT;
clearItemCargoGlobal HELO_TRANSPORT;
clearBackpackCargoGlobal HELO_TRANSPORT;
if ( alive HELO_TRANSPORT ) then {
waitUntil {sleep 1;!alive HELO_TRANSPORT;};
sleep 15;
+ deleteVehicle HELO_TRANSPORT;
};
}; | 8 | 0.222222 | 3 | 5 |
60edb0066beb7d0ec804f214b19599b5cbcad35c | src/orbit-application/src/main/kotlin/orbit/application/App.kt | src/orbit-application/src/main/kotlin/orbit/application/App.kt | /*
Copyright (C) 2015 - 2019 Electronic Arts Inc. All rights reserved.
This file is part of the Orbit Project <https://www.orbit.cloud>.
See license in LICENSE.
*/
package orbit.application
import kotlinx.coroutines.runBlocking
import orbit.server.OrbitServer
import orbit.server.OrbitServerConfig
import orbit.server.etcd.EtcdAddressableDirectory
import orbit.server.etcd.EtcdNodeDirectory
fun main() {
runBlocking {
val server = OrbitServer(
OrbitServerConfig(
nodeDirectory = EtcdNodeDirectory.EtcdNodeDirectoryConfig(
System.getenv("NODE_DIRECTORY") ?: "0.0.0.0"
),
addressableDirectory = EtcdAddressableDirectory.EtcdAddressableDirectoryConfig(
System.getenv("ADDRESSABLE_DIRECTORY") ?: "0.0.0.0"
)
)
)
server.start().join()
}
} | /*
Copyright (C) 2015 - 2019 Electronic Arts Inc. All rights reserved.
This file is part of the Orbit Project <https://www.orbit.cloud>.
See license in LICENSE.
*/
package orbit.application
import kotlinx.coroutines.runBlocking
import kotlinx.coroutines.time.delay
import orbit.server.OrbitServer
import orbit.server.OrbitServerConfig
import orbit.server.etcd.EtcdAddressableDirectory
import orbit.server.etcd.EtcdNodeDirectory
import java.time.Duration
fun main() {
runBlocking {
val server = OrbitServer(
OrbitServerConfig(
nodeDirectory = EtcdNodeDirectory.EtcdNodeDirectoryConfig(
System.getenv("NODE_DIRECTORY") ?: "0.0.0.0"
),
addressableDirectory = EtcdAddressableDirectory.EtcdAddressableDirectoryConfig(
System.getenv("ADDRESSABLE_DIRECTORY") ?: "0.0.0.0"
)
)
)
delay(Duration.ofSeconds(5))
server.start().join()
}
} | Introduce temporary startup delay to account for dependency startup timing. Better options coming down the road. | Introduce temporary startup delay to account for dependency startup timing. Better options coming down the road.
| Kotlin | bsd-3-clause | orbit/orbit,orbit/orbit | kotlin | ## Code Before:
/*
Copyright (C) 2015 - 2019 Electronic Arts Inc. All rights reserved.
This file is part of the Orbit Project <https://www.orbit.cloud>.
See license in LICENSE.
*/
package orbit.application
import kotlinx.coroutines.runBlocking
import orbit.server.OrbitServer
import orbit.server.OrbitServerConfig
import orbit.server.etcd.EtcdAddressableDirectory
import orbit.server.etcd.EtcdNodeDirectory
fun main() {
runBlocking {
val server = OrbitServer(
OrbitServerConfig(
nodeDirectory = EtcdNodeDirectory.EtcdNodeDirectoryConfig(
System.getenv("NODE_DIRECTORY") ?: "0.0.0.0"
),
addressableDirectory = EtcdAddressableDirectory.EtcdAddressableDirectoryConfig(
System.getenv("ADDRESSABLE_DIRECTORY") ?: "0.0.0.0"
)
)
)
server.start().join()
}
}
## Instruction:
Introduce temporary startup delay to account for dependency startup timing. Better options coming down the road.
## Code After:
/*
Copyright (C) 2015 - 2019 Electronic Arts Inc. All rights reserved.
This file is part of the Orbit Project <https://www.orbit.cloud>.
See license in LICENSE.
*/
package orbit.application
import kotlinx.coroutines.runBlocking
import kotlinx.coroutines.time.delay
import orbit.server.OrbitServer
import orbit.server.OrbitServerConfig
import orbit.server.etcd.EtcdAddressableDirectory
import orbit.server.etcd.EtcdNodeDirectory
import java.time.Duration
fun main() {
runBlocking {
val server = OrbitServer(
OrbitServerConfig(
nodeDirectory = EtcdNodeDirectory.EtcdNodeDirectoryConfig(
System.getenv("NODE_DIRECTORY") ?: "0.0.0.0"
),
addressableDirectory = EtcdAddressableDirectory.EtcdAddressableDirectoryConfig(
System.getenv("ADDRESSABLE_DIRECTORY") ?: "0.0.0.0"
)
)
)
delay(Duration.ofSeconds(5))
server.start().join()
}
} | /*
Copyright (C) 2015 - 2019 Electronic Arts Inc. All rights reserved.
This file is part of the Orbit Project <https://www.orbit.cloud>.
See license in LICENSE.
*/
package orbit.application
import kotlinx.coroutines.runBlocking
+ import kotlinx.coroutines.time.delay
import orbit.server.OrbitServer
import orbit.server.OrbitServerConfig
import orbit.server.etcd.EtcdAddressableDirectory
import orbit.server.etcd.EtcdNodeDirectory
+ import java.time.Duration
fun main() {
runBlocking {
val server = OrbitServer(
OrbitServerConfig(
nodeDirectory = EtcdNodeDirectory.EtcdNodeDirectoryConfig(
System.getenv("NODE_DIRECTORY") ?: "0.0.0.0"
),
addressableDirectory = EtcdAddressableDirectory.EtcdAddressableDirectoryConfig(
System.getenv("ADDRESSABLE_DIRECTORY") ?: "0.0.0.0"
)
)
)
+ delay(Duration.ofSeconds(5))
server.start().join()
}
} | 3 | 0.1 | 3 | 0 |
08badbb9871132f20db15990f44b01e1f2f6f30b | status.sh | status.sh | if [ "$script_type" == "client-connect" ]; then
JSON="{\"pub\":\"$trusted_ip\",\"cn\":\"$common_name\",\"vpn\":\"$ifconfig_pool_remote_ip\"}"
curl -H "Content-Type: application/json" -X POST -d "$JSON" http://127.0.0.1:3013/server/0/connect --connect-timeout 2 -m 3
elif [ "$script_type" == "client-disconnect" ]; then
JSON="{\"cn\":\"$common_name\"}"
curl -H "Content-Type: application/json" -X POST -d "$JSON" http://127.0.0.1:3013/server/0/disconnect --connect-timeout 2 -m 3
fi
exit 0
| HOST="127.0.0.1"
PORT="3013"
SERVER="0"
if [ "$script_type" == "client-connect" ]; then
JSON="{\"pub\":\"$trusted_ip\",\"cn\":\"$common_name\",\"vpn\":\"$ifconfig_pool_remote_ip\"}"
curl -H "Content-Type: application/json" -X POST -d "$JSON" "http://$HOST:$PORT/server/$SERVER/connect" --connect-timeout 2 -m 3
elif [ "$script_type" == "client-disconnect" ]; then
JSON="{\"cn\":\"$common_name\"}"
curl -H "Content-Type: application/json" -X POST -d "$JSON" "http://$HOST:$PORT/server/$SERVER/disconnect" --connect-timeout 2 -m 3
fi
exit 0
| Use variables to configure script | Use variables to configure script
| Shell | mit | AuspeXeu/openvpn-status,AuspeXeu/openvpn-status | shell | ## Code Before:
if [ "$script_type" == "client-connect" ]; then
JSON="{\"pub\":\"$trusted_ip\",\"cn\":\"$common_name\",\"vpn\":\"$ifconfig_pool_remote_ip\"}"
curl -H "Content-Type: application/json" -X POST -d "$JSON" http://127.0.0.1:3013/server/0/connect --connect-timeout 2 -m 3
elif [ "$script_type" == "client-disconnect" ]; then
JSON="{\"cn\":\"$common_name\"}"
curl -H "Content-Type: application/json" -X POST -d "$JSON" http://127.0.0.1:3013/server/0/disconnect --connect-timeout 2 -m 3
fi
exit 0
## Instruction:
Use variables to configure script
## Code After:
HOST="127.0.0.1"
PORT="3013"
SERVER="0"
if [ "$script_type" == "client-connect" ]; then
JSON="{\"pub\":\"$trusted_ip\",\"cn\":\"$common_name\",\"vpn\":\"$ifconfig_pool_remote_ip\"}"
curl -H "Content-Type: application/json" -X POST -d "$JSON" "http://$HOST:$PORT/server/$SERVER/connect" --connect-timeout 2 -m 3
elif [ "$script_type" == "client-disconnect" ]; then
JSON="{\"cn\":\"$common_name\"}"
curl -H "Content-Type: application/json" -X POST -d "$JSON" "http://$HOST:$PORT/server/$SERVER/disconnect" --connect-timeout 2 -m 3
fi
exit 0
| + HOST="127.0.0.1"
+ PORT="3013"
+ SERVER="0"
+
if [ "$script_type" == "client-connect" ]; then
JSON="{\"pub\":\"$trusted_ip\",\"cn\":\"$common_name\",\"vpn\":\"$ifconfig_pool_remote_ip\"}"
- curl -H "Content-Type: application/json" -X POST -d "$JSON" http://127.0.0.1:3013/server/0/connect --connect-timeout 2 -m 3
? ^^^^^^^^^ ^^^^ ^
+ curl -H "Content-Type: application/json" -X POST -d "$JSON" "http://$HOST:$PORT/server/$SERVER/connect" --connect-timeout 2 -m 3
? + ^^^^^ ^^^^^ ^^^^^^^ +
elif [ "$script_type" == "client-disconnect" ]; then
JSON="{\"cn\":\"$common_name\"}"
- curl -H "Content-Type: application/json" -X POST -d "$JSON" http://127.0.0.1:3013/server/0/disconnect --connect-timeout 2 -m 3
? ^^^^^^^^^ ^^^^ ^
+ curl -H "Content-Type: application/json" -X POST -d "$JSON" "http://$HOST:$PORT/server/$SERVER/disconnect" --connect-timeout 2 -m 3
? + ^^^^^ ^^^^^ ^^^^^^^ +
fi
exit 0 | 8 | 1 | 6 | 2 |
1137ca1e25ffcf522312b897cd951283e200f15d | shelly/plotlyjs/static/plotlyjs/compress_attributes.js | shelly/plotlyjs/static/plotlyjs/compress_attributes.js | 'use strict';
/*
* Browserify transform that strips meta attributes out of the plotlyjs bundle
*
*/
var through = require('through2');
var attributeNamesToRemove = [
'description', 'requiredOpts', 'otherOpts', 'hrName', 'role'
];
var objectNamesToRemove = ['_deprecated'];
// ref: http://www.regexr.com/3bj6p
var regexStr = '';
attributeNamesToRemove.forEach(function(attr, i) {
// one line string with or without trailing comma
regexStr += attr + ': \'.*\'' + ',?' + '|';
// array (of strings) with or without trailing comma
regexStr += attr + ': \\[[\\s\\S]*?\\].*' + ',?' + '|';
});
// ref: http://www.regexr.com/3bor2
objectNamesToRemove.forEach(function(obj) {
// object with '// to delete following trailling '}'
regexStr += obj + ': {[\\s\\S]*?}' + ',? ?\\/\\/ ?to delete' + '|';
});
// remove trailing '|'
regexStr = regexStr.substring(0, regexStr.length-1);
var regex = new RegExp(regexStr, 'g');
module.exports = function() {
return through(function(buf, enc, next) {
this.push(
buf.toString('utf-8')
.replace(regex, '')
);
next();
});
};
| 'use strict';
/*
* Browserify transform that strips meta attributes out of the plotlyjs bundle
*
*/
var through = require('through2');
var attributeNamesToRemove = ['description', 'requiredOpts', 'otherOpts', 'hrName', 'role'],
regexStr = '';
// ref: http://www.regexr.com/3bj6p
attributeNamesToRemove.forEach(function(attr, i) {
// one line string with or without trailing comma
regexStr += attr + ': \'.*\'' + ',?' + '|';
// array of strings with or without trailing comma
regexStr += attr + ': \\[[\\s\\S]*?\\].*' + ',?';
if(i !== attributeNamesToRemove.length-1) regexStr += '|';
});
var regex = new RegExp(regexStr, 'g');
module.exports = function() {
return through(function(buf, enc, next) {
this.push(
buf.toString('utf-8')
.replace(regex, '')
);
next();
});
};
| Revert "add regex generator for object to delete (e.g. '_deprecated': {})" | Revert "add regex generator for object to delete (e.g. '_deprecated': {})"
This reverts commit 2774e638e9ffc9ccbfc8db911767d0929910f04b.
| JavaScript | mit | asolagmbh/plotly.js,plotly/plotly.js,plotly/plotly.js,iongroup/plotly.js,iongroup/plotly.js,plotly/plotly.js,etpinard/plotly.js,aburato/plotly.js,etpinard/plotly.js,asolagmbh/plotly.js,iongroup/plotly.js,plotly/plotly.js,aburato/plotly.js,asolagmbh/plotly.js,etpinard/plotly.js | javascript | ## Code Before:
'use strict';
/*
* Browserify transform that strips meta attributes out of the plotlyjs bundle
*
*/
var through = require('through2');
var attributeNamesToRemove = [
'description', 'requiredOpts', 'otherOpts', 'hrName', 'role'
];
var objectNamesToRemove = ['_deprecated'];
// ref: http://www.regexr.com/3bj6p
var regexStr = '';
attributeNamesToRemove.forEach(function(attr, i) {
// one line string with or without trailing comma
regexStr += attr + ': \'.*\'' + ',?' + '|';
// array (of strings) with or without trailing comma
regexStr += attr + ': \\[[\\s\\S]*?\\].*' + ',?' + '|';
});
// ref: http://www.regexr.com/3bor2
objectNamesToRemove.forEach(function(obj) {
// object with '// to delete following trailling '}'
regexStr += obj + ': {[\\s\\S]*?}' + ',? ?\\/\\/ ?to delete' + '|';
});
// remove trailing '|'
regexStr = regexStr.substring(0, regexStr.length-1);
var regex = new RegExp(regexStr, 'g');
module.exports = function() {
return through(function(buf, enc, next) {
this.push(
buf.toString('utf-8')
.replace(regex, '')
);
next();
});
};
## Instruction:
Revert "add regex generator for object to delete (e.g. '_deprecated': {})"
This reverts commit 2774e638e9ffc9ccbfc8db911767d0929910f04b.
## Code After:
'use strict';
/*
* Browserify transform that strips meta attributes out of the plotlyjs bundle
*
*/
var through = require('through2');
var attributeNamesToRemove = ['description', 'requiredOpts', 'otherOpts', 'hrName', 'role'],
regexStr = '';
// ref: http://www.regexr.com/3bj6p
attributeNamesToRemove.forEach(function(attr, i) {
// one line string with or without trailing comma
regexStr += attr + ': \'.*\'' + ',?' + '|';
// array of strings with or without trailing comma
regexStr += attr + ': \\[[\\s\\S]*?\\].*' + ',?';
if(i !== attributeNamesToRemove.length-1) regexStr += '|';
});
var regex = new RegExp(regexStr, 'g');
module.exports = function() {
return through(function(buf, enc, next) {
this.push(
buf.toString('utf-8')
.replace(regex, '')
);
next();
});
};
| 'use strict';
/*
* Browserify transform that strips meta attributes out of the plotlyjs bundle
*
*/
var through = require('through2');
- var attributeNamesToRemove = [
- 'description', 'requiredOpts', 'otherOpts', 'hrName', 'role'
? ^
+ var attributeNamesToRemove = ['description', 'requiredOpts', 'otherOpts', 'hrName', 'role'],
? +++ ++++++++++++++++++++++ + ^ ++
+ regexStr = '';
- ];
-
- var objectNamesToRemove = ['_deprecated'];
// ref: http://www.regexr.com/3bj6p
- var regexStr = '';
attributeNamesToRemove.forEach(function(attr, i) {
// one line string with or without trailing comma
regexStr += attr + ': \'.*\'' + ',?' + '|';
+ // array of strings with or without trailing comma
+ regexStr += attr + ': \\[[\\s\\S]*?\\].*' + ',?';
+ if(i !== attributeNamesToRemove.length-1) regexStr += '|';
- // array (of strings) with or without trailing comma
- regexStr += attr + ': \\[[\\s\\S]*?\\].*' + ',?' + '|';
});
-
- // ref: http://www.regexr.com/3bor2
- objectNamesToRemove.forEach(function(obj) {
- // object with '// to delete following trailling '}'
- regexStr += obj + ': {[\\s\\S]*?}' + ',? ?\\/\\/ ?to delete' + '|';
- });
-
- // remove trailing '|'
- regexStr = regexStr.substring(0, regexStr.length-1);
var regex = new RegExp(regexStr, 'g');
module.exports = function() {
return through(function(buf, enc, next) {
this.push(
buf.toString('utf-8')
.replace(regex, '')
);
next();
});
}; | 22 | 0.488889 | 5 | 17 |
4249c842fb835e188c9901f02b24dfefb50abcea | src/get-element-override.js | src/get-element-override.js | export default function getElementOverride (element, overrides) {
const parentNode = getTopMostElementParent(element)
for (let override in overrides) {
if (overrides.hasOwnProperty(override)) {
try {
const matches = parentNode.querySelectorAll(override)
if (Array.from(matches).indexOf(element) > -1) {
return overrides[override]
}
} catch (e) {
// At least we tried.
}
}
}
}
function getTopMostElementParent (element) {
let parentNode = element.parentNode
while (!parentNode) {
parentNode = parentNode.parentNode
}
return parentNode
}
| export default function getElementOverride (element, overrides) {
const parentNode = getTopMostElementParent(element)
for (let override in overrides) {
if (overrides.hasOwnProperty(override)) {
try {
const matches = parentNode.querySelectorAll(override)
if (Array.from(matches).indexOf(element) > -1) {
return overrides[override]
}
} catch (e) {
// At least we tried.
}
}
}
}
function getTopMostElementParent (element) {
let parentNode = element.parentNode
// Traverse up the DOM tree until the last parent is found.
while (parentNode.parentNode) {
parentNode = parentNode.parentNode
}
return parentNode
}
| Fix obvious bug in `getTopMostElementParent` | Fix obvious bug in `getTopMostElementParent`
| JavaScript | mit | Deschtex/html2react | javascript | ## Code Before:
export default function getElementOverride (element, overrides) {
const parentNode = getTopMostElementParent(element)
for (let override in overrides) {
if (overrides.hasOwnProperty(override)) {
try {
const matches = parentNode.querySelectorAll(override)
if (Array.from(matches).indexOf(element) > -1) {
return overrides[override]
}
} catch (e) {
// At least we tried.
}
}
}
}
function getTopMostElementParent (element) {
let parentNode = element.parentNode
while (!parentNode) {
parentNode = parentNode.parentNode
}
return parentNode
}
## Instruction:
Fix obvious bug in `getTopMostElementParent`
## Code After:
export default function getElementOverride (element, overrides) {
const parentNode = getTopMostElementParent(element)
for (let override in overrides) {
if (overrides.hasOwnProperty(override)) {
try {
const matches = parentNode.querySelectorAll(override)
if (Array.from(matches).indexOf(element) > -1) {
return overrides[override]
}
} catch (e) {
// At least we tried.
}
}
}
}
function getTopMostElementParent (element) {
let parentNode = element.parentNode
// Traverse up the DOM tree until the last parent is found.
while (parentNode.parentNode) {
parentNode = parentNode.parentNode
}
return parentNode
}
| export default function getElementOverride (element, overrides) {
const parentNode = getTopMostElementParent(element)
for (let override in overrides) {
if (overrides.hasOwnProperty(override)) {
try {
const matches = parentNode.querySelectorAll(override)
if (Array.from(matches).indexOf(element) > -1) {
return overrides[override]
}
} catch (e) {
// At least we tried.
}
}
}
}
function getTopMostElementParent (element) {
let parentNode = element.parentNode
+ // Traverse up the DOM tree until the last parent is found.
- while (!parentNode) {
? ^
+ while (parentNode.parentNode) {
? ^^^^^^^^^^^
parentNode = parentNode.parentNode
}
return parentNode
} | 3 | 0.125 | 2 | 1 |
2a3e39e7bed95bdadd0635dc7f4c060d183f5079 | headless-services/bosh-language-server/src/main/java/org/springframework/ide/vscode/bosh/Main.java | headless-services/bosh-language-server/src/main/java/org/springframework/ide/vscode/bosh/Main.java | /*******************************************************************************
* Copyright (c) 2016-2017 Pivotal, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Pivotal, Inc. - initial API and implementation
*******************************************************************************/
package org.springframework.ide.vscode.bosh;
import java.io.IOException;
import org.springframework.ide.vscode.bosh.models.BoshCommandCloudConfigProvider;
import org.springframework.ide.vscode.commons.languageserver.LaunguageServerApp;
public class Main {
public static void main(String[] args) throws IOException, InterruptedException {
LaunguageServerApp.start(() -> new BoshLanguageServer(
new BoshCommandCloudConfigProvider(),
(dc) -> null //TODO: real model provider here!
));
}
}
| /*******************************************************************************
* Copyright (c) 2016-2017 Pivotal, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Pivotal, Inc. - initial API and implementation
*******************************************************************************/
package org.springframework.ide.vscode.bosh;
import java.io.IOException;
import org.springframework.ide.vscode.bosh.models.BoshCommandCloudConfigProvider;
import org.springframework.ide.vscode.bosh.models.BoshCommandStemcellsProvider;
import org.springframework.ide.vscode.commons.languageserver.LaunguageServerApp;
public class Main {
public static void main(String[] args) throws IOException, InterruptedException {
LaunguageServerApp.start(() -> new BoshLanguageServer(
new BoshCommandCloudConfigProvider(),
new BoshCommandStemcellsProvider()
));
}
}
| Add BoshCommandStemcellsProvider properly to Bosh language server | Add BoshCommandStemcellsProvider properly to Bosh language server | Java | epl-1.0 | spring-projects/sts4,spring-projects/sts4,spring-projects/sts4,spring-projects/sts4,spring-projects/sts4,spring-projects/sts4 | java | ## Code Before:
/*******************************************************************************
* Copyright (c) 2016-2017 Pivotal, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Pivotal, Inc. - initial API and implementation
*******************************************************************************/
package org.springframework.ide.vscode.bosh;
import java.io.IOException;
import org.springframework.ide.vscode.bosh.models.BoshCommandCloudConfigProvider;
import org.springframework.ide.vscode.commons.languageserver.LaunguageServerApp;
public class Main {
public static void main(String[] args) throws IOException, InterruptedException {
LaunguageServerApp.start(() -> new BoshLanguageServer(
new BoshCommandCloudConfigProvider(),
(dc) -> null //TODO: real model provider here!
));
}
}
## Instruction:
Add BoshCommandStemcellsProvider properly to Bosh language server
## Code After:
/*******************************************************************************
* Copyright (c) 2016-2017 Pivotal, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Pivotal, Inc. - initial API and implementation
*******************************************************************************/
package org.springframework.ide.vscode.bosh;
import java.io.IOException;
import org.springframework.ide.vscode.bosh.models.BoshCommandCloudConfigProvider;
import org.springframework.ide.vscode.bosh.models.BoshCommandStemcellsProvider;
import org.springframework.ide.vscode.commons.languageserver.LaunguageServerApp;
public class Main {
public static void main(String[] args) throws IOException, InterruptedException {
LaunguageServerApp.start(() -> new BoshLanguageServer(
new BoshCommandCloudConfigProvider(),
new BoshCommandStemcellsProvider()
));
}
}
| /*******************************************************************************
* Copyright (c) 2016-2017 Pivotal, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Pivotal, Inc. - initial API and implementation
*******************************************************************************/
package org.springframework.ide.vscode.bosh;
import java.io.IOException;
import org.springframework.ide.vscode.bosh.models.BoshCommandCloudConfigProvider;
+ import org.springframework.ide.vscode.bosh.models.BoshCommandStemcellsProvider;
import org.springframework.ide.vscode.commons.languageserver.LaunguageServerApp;
public class Main {
public static void main(String[] args) throws IOException, InterruptedException {
LaunguageServerApp.start(() -> new BoshLanguageServer(
new BoshCommandCloudConfigProvider(),
- (dc) -> null //TODO: real model provider here!
+ new BoshCommandStemcellsProvider()
));
}
} | 3 | 0.12 | 2 | 1 |
1bd13ff811b7ac6463e2e5a49269df51bb386c3a | pombola/kenya/templates/core/person_appearances.html | pombola/kenya/templates/core/person_appearances.html | {% extends 'core/person_base.html' %}
{% load url from future %}
{% block title %}{{ object.name }} Appearances{% endblock %}
{% block subcontent %}
{% if settings.ENABLED_FEATURES.hansard %}
{% with hansard_count=object.hansard_entries.count %}
{% if hansard_count %}
<h2>Parliamentary appearances</h2>
<p>{{ person.name }} has spoken {{ hansard_entries.count }} times in Parliament.</p>
<h3>Recent Appearances</h3>
<div id="appearances" data-url="{% url "hansard:person_summary" slug=object.slug %}">
{% include 'core/person_detail_appearance_list.html' %}
</div>
<h3>Lifetime summary</h3>
<p>{{ person.name }} has month by month spoken in parliament this many times (if a month is missing there were no speeches found for that month):</p>
<ul>
{% for summary in lifetime_summary %}
<li>{{ summary.date|date:"M Y" }}: {{ summary.count }} speeches</li>
{% endfor %}
</ul>
{% else %}
<div>
<p>{{ object.name }} has never spoken in Parliament.</p>
</div>
{% endif %}
{% endwith %}
{% else %}
No appearances were found.
{% endif %}
{% endblock %}
| {% extends 'core/person_base.html' %}
{% load url from future %}
{% block title %}{{ object.name }} Appearances{% endblock %}
{% block subcontent %}
{% if settings.ENABLED_FEATURES.hansard %}
{% with hansard_count=object.hansard_entries.count %}
{% if hansard_count %}
<h2>Parliamentary appearances</h2>
<p>{{ person.name }} has spoken {{ hansard_entries.count }} times in Parliament.</p>
<h3>Recent Appearances</h3>
<div id="appearances" data-url="{% url "hansard:person_summary" slug=object.slug %}">
{% include 'core/person_detail_appearance_list.html' %}
</div>
<p>(<a href="{% url "hansard:person_appearances_all" slug=person.slug %}">View all speeches.</a>)</p>
<h3>Lifetime summary</h3>
<p>{{ person.name }} has month by month spoken in parliament this many times (if a month is missing there were no speeches found for that month):</p>
<ul>
{% for summary in lifetime_summary %}
<li>{{ summary.date|date:"M Y" }}: {{ summary.count }} speeches</li>
{% endfor %}
</ul>
{% else %}
<div>
<p>{{ object.name }} has never spoken in Parliament.</p>
</div>
{% endif %}
{% endwith %}
{% else %}
No appearances were found.
{% endif %}
{% endblock %}
| Put back the link to view all hansard speeches from a person | KE: Put back the link to view all hansard speeches from a person
This was introduced on master since this branch was created, and so
would otherwise be lost on merging.
| HTML | agpl-3.0 | patricmutwiri/pombola,hzj123/56th,geoffkilpin/pombola,patricmutwiri/pombola,hzj123/56th,patricmutwiri/pombola,hzj123/56th,mysociety/pombola,mysociety/pombola,geoffkilpin/pombola,mysociety/pombola,hzj123/56th,geoffkilpin/pombola,patricmutwiri/pombola,geoffkilpin/pombola,ken-muturi/pombola,geoffkilpin/pombola,mysociety/pombola,ken-muturi/pombola,ken-muturi/pombola,ken-muturi/pombola,mysociety/pombola,mysociety/pombola,ken-muturi/pombola,hzj123/56th,patricmutwiri/pombola,ken-muturi/pombola,patricmutwiri/pombola,hzj123/56th,geoffkilpin/pombola | html | ## Code Before:
{% extends 'core/person_base.html' %}
{% load url from future %}
{% block title %}{{ object.name }} Appearances{% endblock %}
{% block subcontent %}
{% if settings.ENABLED_FEATURES.hansard %}
{% with hansard_count=object.hansard_entries.count %}
{% if hansard_count %}
<h2>Parliamentary appearances</h2>
<p>{{ person.name }} has spoken {{ hansard_entries.count }} times in Parliament.</p>
<h3>Recent Appearances</h3>
<div id="appearances" data-url="{% url "hansard:person_summary" slug=object.slug %}">
{% include 'core/person_detail_appearance_list.html' %}
</div>
<h3>Lifetime summary</h3>
<p>{{ person.name }} has month by month spoken in parliament this many times (if a month is missing there were no speeches found for that month):</p>
<ul>
{% for summary in lifetime_summary %}
<li>{{ summary.date|date:"M Y" }}: {{ summary.count }} speeches</li>
{% endfor %}
</ul>
{% else %}
<div>
<p>{{ object.name }} has never spoken in Parliament.</p>
</div>
{% endif %}
{% endwith %}
{% else %}
No appearances were found.
{% endif %}
{% endblock %}
## Instruction:
KE: Put back the link to view all hansard speeches from a person
This was introduced on master since this branch was created, and so
would otherwise be lost on merging.
## Code After:
{% extends 'core/person_base.html' %}
{% load url from future %}
{% block title %}{{ object.name }} Appearances{% endblock %}
{% block subcontent %}
{% if settings.ENABLED_FEATURES.hansard %}
{% with hansard_count=object.hansard_entries.count %}
{% if hansard_count %}
<h2>Parliamentary appearances</h2>
<p>{{ person.name }} has spoken {{ hansard_entries.count }} times in Parliament.</p>
<h3>Recent Appearances</h3>
<div id="appearances" data-url="{% url "hansard:person_summary" slug=object.slug %}">
{% include 'core/person_detail_appearance_list.html' %}
</div>
<p>(<a href="{% url "hansard:person_appearances_all" slug=person.slug %}">View all speeches.</a>)</p>
<h3>Lifetime summary</h3>
<p>{{ person.name }} has month by month spoken in parliament this many times (if a month is missing there were no speeches found for that month):</p>
<ul>
{% for summary in lifetime_summary %}
<li>{{ summary.date|date:"M Y" }}: {{ summary.count }} speeches</li>
{% endfor %}
</ul>
{% else %}
<div>
<p>{{ object.name }} has never spoken in Parliament.</p>
</div>
{% endif %}
{% endwith %}
{% else %}
No appearances were found.
{% endif %}
{% endblock %}
| {% extends 'core/person_base.html' %}
{% load url from future %}
{% block title %}{{ object.name }} Appearances{% endblock %}
{% block subcontent %}
{% if settings.ENABLED_FEATURES.hansard %}
{% with hansard_count=object.hansard_entries.count %}
{% if hansard_count %}
<h2>Parliamentary appearances</h2>
<p>{{ person.name }} has spoken {{ hansard_entries.count }} times in Parliament.</p>
<h3>Recent Appearances</h3>
<div id="appearances" data-url="{% url "hansard:person_summary" slug=object.slug %}">
{% include 'core/person_detail_appearance_list.html' %}
</div>
+
+ <p>(<a href="{% url "hansard:person_appearances_all" slug=person.slug %}">View all speeches.</a>)</p>
<h3>Lifetime summary</h3>
<p>{{ person.name }} has month by month spoken in parliament this many times (if a month is missing there were no speeches found for that month):</p>
<ul>
{% for summary in lifetime_summary %}
<li>{{ summary.date|date:"M Y" }}: {{ summary.count }} speeches</li>
{% endfor %}
</ul>
{% else %}
<div>
<p>{{ object.name }} has never spoken in Parliament.</p>
</div>
{% endif %}
{% endwith %}
{% else %}
No appearances were found.
{% endif %}
{% endblock %} | 2 | 0.044444 | 2 | 0 |
565b70c22a45fe3f6c3b72a24a51d2620766b63c | docs/pages/technical-specs/expo-sfv-0.md | docs/pages/technical-specs/expo-sfv-0.md | ---
title: Expo Structured Field Values
sidebar_title: Expo Structured Field Values
---
Version 0
---
Structured Field Values for HTTP, [IETF RFC 8941](https://tools.ietf.org/html/rfc8941), is a proposal to formalize header syntax and facilitate nested data.
Since it is still a work in progress, Expo maintains a custom version that only implements the following subset of the protocol defined in [IETF RFC 8941](https://tools.ietf.org/html/rfc8941):
* All key values
* String, integer, and decimal items
* Dictionaries
| ---
title: Expo Structured Field Values
sidebar_title: Expo Structured Field Values
---
Version 0
Updated 2021-05-24
---
Structured Field Values for HTTP, [IETF RFC 8941](https://tools.ietf.org/html/rfc8941), is a proposal to formalize header syntax and facilitate nested data.
Since it is still a work in progress, Expo maintains a custom version that only implements the following subset of the protocol defined in [IETF RFC 8941](https://tools.ietf.org/html/rfc8941):
* All key values
* String, integer, and decimal items
* Dictionaries
| Add date to Expo SFV docs | [docs] Add date to Expo SFV docs
This provides some more context about the how recently the spec was updated. | Markdown | bsd-3-clause | exponent/exponent,exponentjs/exponent,exponentjs/exponent,exponent/exponent,exponentjs/exponent,exponent/exponent,exponentjs/exponent,exponentjs/exponent,exponent/exponent,exponent/exponent,exponentjs/exponent,exponent/exponent,exponent/exponent,exponentjs/exponent,exponentjs/exponent,exponent/exponent,exponent/exponent,exponentjs/exponent | markdown | ## Code Before:
---
title: Expo Structured Field Values
sidebar_title: Expo Structured Field Values
---
Version 0
---
Structured Field Values for HTTP, [IETF RFC 8941](https://tools.ietf.org/html/rfc8941), is a proposal to formalize header syntax and facilitate nested data.
Since it is still a work in progress, Expo maintains a custom version that only implements the following subset of the protocol defined in [IETF RFC 8941](https://tools.ietf.org/html/rfc8941):
* All key values
* String, integer, and decimal items
* Dictionaries
## Instruction:
[docs] Add date to Expo SFV docs
This provides some more context about the how recently the spec was updated.
## Code After:
---
title: Expo Structured Field Values
sidebar_title: Expo Structured Field Values
---
Version 0
Updated 2021-05-24
---
Structured Field Values for HTTP, [IETF RFC 8941](https://tools.ietf.org/html/rfc8941), is a proposal to formalize header syntax and facilitate nested data.
Since it is still a work in progress, Expo maintains a custom version that only implements the following subset of the protocol defined in [IETF RFC 8941](https://tools.ietf.org/html/rfc8941):
* All key values
* String, integer, and decimal items
* Dictionaries
| ---
title: Expo Structured Field Values
sidebar_title: Expo Structured Field Values
---
Version 0
+
+ Updated 2021-05-24
---
Structured Field Values for HTTP, [IETF RFC 8941](https://tools.ietf.org/html/rfc8941), is a proposal to formalize header syntax and facilitate nested data.
Since it is still a work in progress, Expo maintains a custom version that only implements the following subset of the protocol defined in [IETF RFC 8941](https://tools.ietf.org/html/rfc8941):
* All key values
* String, integer, and decimal items
* Dictionaries | 2 | 0.133333 | 2 | 0 |
57709a6821b7ac894af9f737f13683a86b81d11b | alura/html-css/receitas.css | alura/html-css/receitas.css | body {
width: 940px;
margin-right: auto;
margin-left: auto;
font-family: Helvetica, Arial, sans-serif;
background-color: #f5f0cd;
}
h2 {
padding: 5px;
color: #fff;
background-color: #b3a742;
}
#td-nutrientes {
width: 40%;
margin-right: auto;
margin-left: auto;
border: 1px solid #000;
}
#td-nutrientes th {
background-color: #F8EA6B;
}
#td-nutrientes tr:nth-child(even) {
background-color: #F8EAB3;
}
#td-nutrientes tr:nth-child(odd) {
background-color: #fff;
}
nav ul li {
display: inline;
margin-right: 50px;
}
#topo {
text-align: center;
}
| body {
width: 940px;
margin-right: auto;
margin-left: auto;
font-family: Helvetica, Arial, sans-serif;
background-color: #f5f0cd;
}
h2 {
padding: 5px;
color: #fff;
background-color: #b3a742;
background-image: linear-gradient(#9c8f67, #c7be9b);
border: 1px solid #000;
border-radius: 15px;
}
figure {
width: 450px;
padding: 15px;
font-family: cursive;
text-align: center;
background-color: #fff;
border: 1px solid #000;
box-shadow: 5px 5px 3px rgba(0, 0, 0, 0.7);
transform: rotate(0deg);
}
figure:hover {
transform: rotate(-5deg);
}
#td-nutrientes {
width: 40%;
margin-right: auto;
margin-left: auto;
border: 1px solid #000;
}
#td-nutrientes th {
background-color: #F8EA6B;
}
#td-nutrientes tr:nth-child(even) {
background-color: #F8EAB3;
}
#td-nutrientes tr:nth-child(odd) {
background-color: #fff;
}
nav ul li {
display: inline;
margin-right: 50px;
}
#topo {
text-align: center;
}
| Update file, Alura, Dando os primeiros passos na web com HTML e CSS, Aula 5 | Update file, Alura, Dando os primeiros passos na web com HTML e CSS, Aula 5
| CSS | mit | fabriciofmsilva/labs,fabriciofmsilva/labs,fabriciofmsilva/labs,fabriciofmsilva/labs,fabriciofmsilva/labs,fabriciofmsilva/labs,fabriciofmsilva/labs,fabriciofmsilva/labs,fabriciofmsilva/labs,fabriciofmsilva/labs,fabriciofmsilva/labs,fabriciofmsilva/labs,fabriciofmsilva/labs,fabriciofmsilva/labs | css | ## Code Before:
body {
width: 940px;
margin-right: auto;
margin-left: auto;
font-family: Helvetica, Arial, sans-serif;
background-color: #f5f0cd;
}
h2 {
padding: 5px;
color: #fff;
background-color: #b3a742;
}
#td-nutrientes {
width: 40%;
margin-right: auto;
margin-left: auto;
border: 1px solid #000;
}
#td-nutrientes th {
background-color: #F8EA6B;
}
#td-nutrientes tr:nth-child(even) {
background-color: #F8EAB3;
}
#td-nutrientes tr:nth-child(odd) {
background-color: #fff;
}
nav ul li {
display: inline;
margin-right: 50px;
}
#topo {
text-align: center;
}
## Instruction:
Update file, Alura, Dando os primeiros passos na web com HTML e CSS, Aula 5
## Code After:
body {
width: 940px;
margin-right: auto;
margin-left: auto;
font-family: Helvetica, Arial, sans-serif;
background-color: #f5f0cd;
}
h2 {
padding: 5px;
color: #fff;
background-color: #b3a742;
background-image: linear-gradient(#9c8f67, #c7be9b);
border: 1px solid #000;
border-radius: 15px;
}
figure {
width: 450px;
padding: 15px;
font-family: cursive;
text-align: center;
background-color: #fff;
border: 1px solid #000;
box-shadow: 5px 5px 3px rgba(0, 0, 0, 0.7);
transform: rotate(0deg);
}
figure:hover {
transform: rotate(-5deg);
}
#td-nutrientes {
width: 40%;
margin-right: auto;
margin-left: auto;
border: 1px solid #000;
}
#td-nutrientes th {
background-color: #F8EA6B;
}
#td-nutrientes tr:nth-child(even) {
background-color: #F8EAB3;
}
#td-nutrientes tr:nth-child(odd) {
background-color: #fff;
}
nav ul li {
display: inline;
margin-right: 50px;
}
#topo {
text-align: center;
}
| body {
width: 940px;
margin-right: auto;
margin-left: auto;
font-family: Helvetica, Arial, sans-serif;
background-color: #f5f0cd;
}
h2 {
padding: 5px;
color: #fff;
background-color: #b3a742;
+ background-image: linear-gradient(#9c8f67, #c7be9b);
+ border: 1px solid #000;
+ border-radius: 15px;
+ }
+
+ figure {
+ width: 450px;
+ padding: 15px;
+ font-family: cursive;
+ text-align: center;
+ background-color: #fff;
+ border: 1px solid #000;
+ box-shadow: 5px 5px 3px rgba(0, 0, 0, 0.7);
+ transform: rotate(0deg);
+ }
+
+ figure:hover {
+ transform: rotate(-5deg);
}
#td-nutrientes {
width: 40%;
margin-right: auto;
margin-left: auto;
border: 1px solid #000;
}
#td-nutrientes th {
background-color: #F8EA6B;
}
#td-nutrientes tr:nth-child(even) {
background-color: #F8EAB3;
}
#td-nutrientes tr:nth-child(odd) {
background-color: #fff;
}
nav ul li {
display: inline;
margin-right: 50px;
}
#topo {
text-align: center;
} | 18 | 0.439024 | 18 | 0 |
e28756a3699fe41e33b3a0d031b5b098918ad3c5 | .travis.yml | .travis.yml | install:
- gem install awesome_bot
script:
- awesome_bot **/*.md --allow-dupe --allow-redirect --skip-save-results
| install:
- gem install awesome_bot
script:
- awesome_bot **/*.md --allow-dupe --allow-redirect --allow 429 --skip-save-results
| Allow 429 in awesome bot | Allow 429 in awesome bot
| YAML | mit | yangshun/tech-interview-handbook,yangshun/tech-interview-handbook,yangshun/tech-interview-handbook,yangshun/tech-interview-handbook,yangshun/tech-interview-handbook | yaml | ## Code Before:
install:
- gem install awesome_bot
script:
- awesome_bot **/*.md --allow-dupe --allow-redirect --skip-save-results
## Instruction:
Allow 429 in awesome bot
## Code After:
install:
- gem install awesome_bot
script:
- awesome_bot **/*.md --allow-dupe --allow-redirect --allow 429 --skip-save-results
| install:
- gem install awesome_bot
script:
- - awesome_bot **/*.md --allow-dupe --allow-redirect --skip-save-results
+ - awesome_bot **/*.md --allow-dupe --allow-redirect --allow 429 --skip-save-results
? ++++++++++++
| 2 | 0.4 | 1 | 1 |
3fcf3e86bb526cea31e428857ac81331a982d6f3 | bin/run-test.sh | bin/run-test.sh |
while true; do
case $1 in
-c)
cover=1
;;
*)
break
esac
shift
done
# Lint
$(npm bin)/jshint . || exit 1
# Get test/coverage command
counter=0
function run {
C="$(npm bin)/istanbul test"
if [ "$cover" ]; then
C="$(npm bin)/istanbul cover --dir ./coverage/${counter}"
((counter++))
fi
($C "$(npm bin)/_mocha" -- $* --timeout 4000 --R spec) || exit 1
}
# Run test/coverage
run test
for test in test/standalone/test-*.js ;
do
run "${test}"
done
# Conditionally publish coverage
if [ "$cover" ]; then
$(npm bin)/istanbul report lcovonly
./node_modules/coveralls/bin/coveralls.js < ./coverage/lcov.info
rm -rf ./coverage
fi
if [ -z "${TRAVIS_PULL_REQUEST}" ] || [ "${TRAVIS_PULL_REQUEST}" = "false" ]
then
./bin/run-e2e.sh
fi
|
# Parse command-line options: -c turns on coverage collection.
while true; do
  case $1 in
    -c)
      # Flag read by run() below to switch from plain test runs to
      # istanbul coverage runs.
      cover=1
      ;;
    *)
      break
  esac
  shift
done

# Lint the whole tree first; abort the build on any jshint error.
$(npm bin)/jshint . || exit 1

# Get test/coverage command
counter=0
function run {
  C="$(npm bin)/istanbul test"
  if [ "$cover" ]; then
    # Each coverage run writes to its own ./coverage/<n> directory so
    # successive invocations do not overwrite each other's reports.
    C="$(npm bin)/istanbul cover --dir ./coverage/${counter}"
    ((counter++))
  fi
  # NOTE(review): "--R spec" looks like a typo for mocha's "-R spec"
  # (reporter flag) -- confirm against the mocha version in use.
  ($C "$(npm bin)/_mocha" -- $* --timeout 4000 --R spec) || exit 1
}

# Run test/coverage
# First the main suite, then each standalone test file in its own
# process (presumably they need isolated interpreter state -- confirm).
run test
for test in test/standalone/test-*.js ;
do
  run "${test}"
done

# Conditionally publish coverage
if [ "$cover" ]; then
  # Merge per-run reports into lcov, upload to Coveralls, then clean up.
  $(npm bin)/istanbul report lcovonly
  ./node_modules/coveralls/bin/coveralls.js < ./coverage/lcov.info
  rm -rf ./coverage
fi
These have been failing for a while but only recently started making
travis go red. We should investigate running these regularly in an
environment with appropriate gcloud credentials.
| Shell | apache-2.0 | DominicKramer/cloud-debug-nodejs,googleapis/cloud-debug-nodejs,ofrobots/cloud-debug-nodejs,googleapis/cloud-debug-nodejs,matthewloring/cloud-debug-nodejs,googleapis/cloud-debug-nodejs,matthewloring/cloud-debug-nodejs,DominicKramer/cloud-debug-nodejs,ofrobots/cloud-debug-nodejs,matthewloring/cloud-debug-nodejs,ofrobots/cloud-debug-nodejs,DominicKramer/cloud-debug-nodejs | shell | ## Code Before:
# Parse command-line options: -c turns on coverage collection.
while true; do
  case $1 in
    -c)
      # Flag read by run() below to switch from plain test runs to
      # istanbul coverage runs.
      cover=1
      ;;
    *)
      break
  esac
  shift
done

# Lint the whole tree first; abort the build on any jshint error.
$(npm bin)/jshint . || exit 1

# Get test/coverage command
counter=0
function run {
  C="$(npm bin)/istanbul test"
  if [ "$cover" ]; then
    # Each coverage run writes to its own ./coverage/<n> directory so
    # successive invocations do not overwrite each other's reports.
    C="$(npm bin)/istanbul cover --dir ./coverage/${counter}"
    ((counter++))
  fi
  # NOTE(review): "--R spec" looks like a typo for mocha's "-R spec"
  # (reporter flag) -- confirm against the mocha version in use.
  ($C "$(npm bin)/_mocha" -- $* --timeout 4000 --R spec) || exit 1
}

# Run test/coverage
# First the main suite, then each standalone test file in its own
# process (presumably they need isolated interpreter state -- confirm).
run test
for test in test/standalone/test-*.js ;
do
  run "${test}"
done

# Conditionally publish coverage
if [ "$cover" ]; then
  # Merge per-run reports into lcov, upload to Coveralls, then clean up.
  $(npm bin)/istanbul report lcovonly
  ./node_modules/coveralls/bin/coveralls.js < ./coverage/lcov.info
  rm -rf ./coverage
fi

# Run the end-to-end suite only when this is not a pull-request build:
# on Travis push builds TRAVIS_PULL_REQUEST is "false", and the variable
# is unset when running outside CI.
if [ -z "${TRAVIS_PULL_REQUEST}" ] || [ "${TRAVIS_PULL_REQUEST}" = "false" ]
then
  ./bin/run-e2e.sh
fi
## Instruction:
Remove e2e tests from travis
These have been failing for a while but only recently started making
travis go red. We should investigate running these regularly in an
environment with appropriate gcloud credentials.
## Code After:
while true; do
case $1 in
-c)
cover=1
;;
*)
break
esac
shift
done
# Lint
$(npm bin)/jshint . || exit 1
# Get test/coverage command
counter=0
function run {
C="$(npm bin)/istanbul test"
if [ "$cover" ]; then
C="$(npm bin)/istanbul cover --dir ./coverage/${counter}"
((counter++))
fi
($C "$(npm bin)/_mocha" -- $* --timeout 4000 --R spec) || exit 1
}
# Run test/coverage
run test
for test in test/standalone/test-*.js ;
do
run "${test}"
done
# Conditionally publish coverage
if [ "$cover" ]; then
$(npm bin)/istanbul report lcovonly
./node_modules/coveralls/bin/coveralls.js < ./coverage/lcov.info
rm -rf ./coverage
fi
|
while true; do
case $1 in
-c)
cover=1
;;
*)
break
esac
shift
done
# Lint
$(npm bin)/jshint . || exit 1
# Get test/coverage command
counter=0
function run {
C="$(npm bin)/istanbul test"
if [ "$cover" ]; then
C="$(npm bin)/istanbul cover --dir ./coverage/${counter}"
((counter++))
fi
($C "$(npm bin)/_mocha" -- $* --timeout 4000 --R spec) || exit 1
}
# Run test/coverage
run test
for test in test/standalone/test-*.js ;
do
run "${test}"
done
# Conditionally publish coverage
if [ "$cover" ]; then
$(npm bin)/istanbul report lcovonly
./node_modules/coveralls/bin/coveralls.js < ./coverage/lcov.info
rm -rf ./coverage
fi
-
- if [ -z "${TRAVIS_PULL_REQUEST}" ] || [ "${TRAVIS_PULL_REQUEST}" = "false" ]
- then
- ./bin/run-e2e.sh
- fi | 5 | 0.108696 | 0 | 5 |
5f168676daa1acaf5f80c0641a6c7f1e88b7c177 | appveyor.yml | appveyor.yml | build_script: "\"%VS120COMNTOOLS%VsDevCmd.bat\" && build.cmd"
test: off # disable automatic test discovery, xUnit already runs as part of build.cmd | build_script:
- '"%VS120COMNTOOLS%VsDevCmd.bat"'
- build.cmd
test: off # disable automatic test discovery, xUnit already runs as part of build.cmd | Fix AppVeyor build script to correctly return error code | Fix AppVeyor build script to correctly return error code
The script chained the execution of VsDevCmd.bat (aka the "Developer
Command Prompt") with build.cmd which seems to confuse AppVeyor
into not recognizing a non-zero exit code (i.e. when a test fails)
and incorrectly marking the build as green.
Invoking VsDevCmd.bat and build.cmd in two separate steps fixes this.
Fixes #214
| YAML | mit | tstringer/corefx,seanshpark/corefx,contoso-dnf-a/corefx,SGuyGe/corefx,the-dwyer/corefx,cartermp/corefx,690486439/corefx,ptoonen/corefx,manu-silicon/corefx,mokchhya/corefx,ravimeda/corefx,manu-silicon/corefx,alexandrnikitin/corefx,PatrickMcDonald/corefx,richlander/corefx,Frank125/corefx,parjong/corefx,anjumrizwi/corefx,fffej/corefx,shmao/corefx,shmao/corefx,nchikanov/corefx,weltkante/corefx,elijah6/corefx,YoupHulsebos/corefx,cartermp/corefx,VPashkov/corefx,claudelee/corefx,brett25/corefx,dhoehna/corefx,Priya91/corefx-1,comdiv/corefx,larsbj1988/corefx,jlin177/corefx,alphonsekurian/corefx,shmao/corefx,FiveTimesTheFun/corefx,ericstj/corefx,690486439/corefx,erpframework/corefx,benjamin-bader/corefx,jeremymeng/corefx,yizhang82/corefx,Frank125/corefx,mokchhya/corefx,cydhaselton/corefx,fernando-rodriguez/corefx,jlin177/corefx,shimingsg/corefx,ptoonen/corefx,s0ne0me/corefx,jhendrixMSFT/corefx,Petermarcu/corefx,akivafr123/corefx,ellismg/corefx,iamjasonp/corefx,Ermiar/corefx,weltkante/corefx,shahid-pk/corefx,tijoytom/corefx,VPashkov/corefx,ravimeda/corefx,bpschoch/corefx,stone-li/corefx,xuweixuwei/corefx,mafiya69/corefx,dhoehna/corefx,jhendrixMSFT/corefx,DnlHarvey/corefx,nbarbettini/corefx,stephenmichaelf/corefx,SGuyGe/corefx,parjong/corefx,gkhanna79/corefx,Chrisboh/corefx,brett25/corefx,fgreinacher/corefx,rahku/corefx,cydhaselton/corefx,mellinoe/corefx,marksmeltzer/corefx,mokchhya/corefx,wtgodbe/corefx,vidhya-bv/corefx-sorting,zhenlan/corefx,Jiayili1/corefx,marksmeltzer/corefx,twsouthwick/corefx,akivafr123/corefx,claudelee/corefx,bitcrazed/corefx,YoupHulsebos/corefx,shmao/corefx,PatrickMcDonald/corefx,Alcaro/corefx,gkhanna79/corefx,seanshpark/corefx,heXelium/corefx,ptoonen/corefx,the-dwyer/corefx,josguil/corefx,shrutigarg/corefx,alexperovich/corefx,rajansingh10/corefx,claudelee/corefx,zhenlan/corefx,shahid-pk/corefx,fgreinacher/corefx,lggomez/corefx,fffej/corefx,Chrisboh/corefx,rajansingh10/corefx,CloudLens/corefx,MaggieTsang/corefx,vijaykota/corefx,marksmeltze
r/corefx,jmhardison/corefx,alphonsekurian/corefx,benjamin-bader/corefx,shana/corefx,jhendrixMSFT/corefx,shiftkey-tester/corefx,YoupHulsebos/corefx,wtgodbe/corefx,andyhebear/corefx,shrutigarg/corefx,erpframework/corefx,marksmeltzer/corefx,mafiya69/corefx,CloudLens/corefx,xuweixuwei/corefx,690486439/corefx,zmaruo/corefx,iamjasonp/corefx,lydonchandra/corefx,spoiledsport/corefx,nelsonsar/corefx,Ermiar/corefx,bpschoch/corefx,jcme/corefx,dtrebbien/corefx,nbarbettini/corefx,vrassouli/corefx,Winsto/corefx,chenxizhang/corefx,dotnet-bot/corefx,SGuyGe/corefx,Yanjing123/corefx,mmitche/corefx,rubo/corefx,jcme/corefx,jcme/corefx,kkurni/corefx,DnlHarvey/corefx,vs-team/corefx,shawnhar/corefx,fernando-rodriguez/corefx,cydhaselton/corefx,tijoytom/corefx,Alcaro/corefx,dotnet-bot/corefx,andyhebear/corefx,spoiledsport/corefx,gabrielPeart/corefx,elijah6/corefx,wtgodbe/corefx,alphonsekurian/corefx,marksmeltzer/corefx,akivafr123/corefx,stone-li/corefx,billwert/corefx,lydonchandra/corefx,jeremymeng/corefx,chenkennt/corefx,richlander/corefx,kyulee1/corefx,popolan1986/corefx,nelsonsar/corefx,gkhanna79/corefx,rajansingh10/corefx,jmhardison/corefx,jlin177/corefx,billwert/corefx,nchikanov/corefx,krytarowski/corefx,mazong1123/corefx,tijoytom/corefx,matthubin/corefx,wtgodbe/corefx,Alcaro/corefx,marksmeltzer/corefx,misterzik/corefx,690486439/corefx,jhendrixMSFT/corefx,nchikanov/corefx,arronei/corefx,shimingsg/corefx,cydhaselton/corefx,rjxby/corefx,PatrickMcDonald/corefx,nelsonsar/corefx,adamralph/corefx,Ermiar/corefx,s0ne0me/corefx,stormleoxia/corefx,ellismg/corefx,mafiya69/corefx,twsouthwick/corefx,seanshpark/corefx,uhaciogullari/corefx,ravimeda/corefx,matthubin/corefx,Jiayili1/corefx,heXelium/corefx,oceanho/corefx,cydhaselton/corefx,krytarowski/corefx,fgreinacher/corefx,shmao/corefx,benjamin-bader/corefx,yizhang82/corefx,nchikanov/corefx,destinyclown/corefx,pallavit/corefx,pallavit/corefx,bitcrazed/corefx,seanshpark/corefx,alphonsekurian/corefx,JosephTremoulet/corefx,janhenke/corefx,n1ghtmare/cor
efx,Frank125/corefx,seanshpark/corefx,BrennanConroy/corefx,gkhanna79/corefx,cydhaselton/corefx,stormleoxia/corefx,DnlHarvey/corefx,arronei/corefx,gregg-miskelly/corefx,cartermp/corefx,rjxby/corefx,zhenlan/corefx,contoso-dnf-a/corefx,jeremymeng/corefx,mellinoe/corefx,pgavlin/corefx,CloudLens/corefx,nbarbettini/corefx,dsplaisted/corefx,stephenmichaelf/corefx,anjumrizwi/corefx,nbarbettini/corefx,elijah6/corefx,stone-li/corefx,alexandrnikitin/corefx,Chrisboh/corefx,richlander/corefx,Winsto/corefx,gkhanna79/corefx,pgavlin/corefx,JosephTremoulet/corefx,axelheer/corefx,alphonsekurian/corefx,chaitrakeshav/corefx,the-dwyer/corefx,weltkante/corefx,heXelium/corefx,dhoehna/corefx,brett25/corefx,tstringer/corefx,akivafr123/corefx,krk/corefx,billwert/corefx,chaitrakeshav/corefx,FiveTimesTheFun/corefx,dsplaisted/corefx,SGuyGe/corefx,jlin177/corefx,tstringer/corefx,Yanjing123/corefx,huanjie/corefx,lggomez/corefx,josguil/corefx,benpye/corefx,stephenmichaelf/corefx,rubo/corefx,JosephTremoulet/corefx,dotnet-bot/corefx,twsouthwick/corefx,elijah6/corefx,stone-li/corefx,VPashkov/corefx,KrisLee/corefx,shimingsg/corefx,mokchhya/corefx,andyhebear/corefx,Jiayili1/corefx,krk/corefx,Alcaro/corefx,heXelium/corefx,benjamin-bader/corefx,fffej/corefx,thiagodin/corefx,claudelee/corefx,benpye/corefx,nelsonsar/corefx,mmitche/corefx,alexandrnikitin/corefx,alexperovich/corefx,rjxby/corefx,Priya91/corefx-1,cydhaselton/corefx,dhoehna/corefx,destinyclown/corefx,tijoytom/corefx,viniciustaveira/corefx,mellinoe/corefx,Petermarcu/corefx,xuweixuwei/corefx,josguil/corefx,comdiv/corefx,parjong/corefx,vidhya-bv/corefx-sorting,popolan1986/corefx,huanjie/corefx,elijah6/corefx,ericstj/corefx,ViktorHofer/corefx,ViktorHofer/corefx,khdang/corefx,JosephTremoulet/corefx,rjxby/corefx,jhendrixMSFT/corefx,huanjie/corefx,stone-li/corefx,shiftkey-tester/corefx,nchikanov/corefx,Petermarcu/corefx,stone-li/corefx,shimingsg/corefx,brett25/corefx,iamjasonp/corefx,oceanho/corefx,ericstj/corefx,jcme/corefx,iamjasonp/corefx,viniciust
aveira/corefx,rahku/corefx,shawnhar/corefx,jlin177/corefx,Yanjing123/corefx,alexperovich/corefx,dotnet-bot/corefx,lggomez/corefx,mazong1123/corefx,n1ghtmare/corefx,erpframework/corefx,akivafr123/corefx,pallavit/corefx,larsbj1988/corefx,YoupHulsebos/corefx,vrassouli/corefx,khdang/corefx,manu-silicon/corefx,Priya91/corefx-1,thiagodin/corefx,misterzik/corefx,benpye/corefx,ViktorHofer/corefx,jeremymeng/corefx,krk/corefx,Ermiar/corefx,gregg-miskelly/corefx,kyulee1/corefx,zhangwenquan/corefx,matthubin/corefx,zhenlan/corefx,shrutigarg/corefx,dkorolev/corefx,vidhya-bv/corefx-sorting,rubo/corefx,BrennanConroy/corefx,mazong1123/corefx,dhoehna/corefx,khdang/corefx,zmaruo/corefx,shahid-pk/corefx,axelheer/corefx,billwert/corefx,viniciustaveira/corefx,zhangwenquan/corefx,ericstj/corefx,shrutigarg/corefx,khdang/corefx,scott156/corefx,gabrielPeart/corefx,KrisLee/corefx,billwert/corefx,EverlessDrop41/corefx,Yanjing123/corefx,ravimeda/corefx,mellinoe/corefx,KrisLee/corefx,lydonchandra/corefx,kyulee1/corefx,zhangwenquan/corefx,tijoytom/corefx,Jiayili1/corefx,Petermarcu/corefx,nchikanov/corefx,VPashkov/corefx,alexperovich/corefx,cartermp/corefx,vrassouli/corefx,YoupHulsebos/corefx,richlander/corefx,stephenmichaelf/corefx,kkurni/corefx,s0ne0me/corefx,manu-silicon/corefx,popolan1986/corefx,yizhang82/corefx,oceanho/corefx,vijaykota/corefx,n1ghtmare/corefx,Frank125/corefx,rahku/corefx,EverlessDrop41/corefx,jhendrixMSFT/corefx,mafiya69/corefx,khdang/corefx,the-dwyer/corefx,MaggieTsang/corefx,weltkante/corefx,ViktorHofer/corefx,seanshpark/corefx,ptoonen/corefx,shiftkey-tester/corefx,richlander/corefx,larsbj1988/corefx,dtrebbien/corefx,lggomez/corefx,tstringer/corefx,yizhang82/corefx,wtgodbe/corefx,Ermiar/corefx,richlander/corefx,stephenmichaelf/corefx,cartermp/corefx,shawnhar/corefx,mokchhya/corefx,dtrebbien/corefx,weltkante/corefx,axelheer/corefx,krk/corefx,mmitche/corefx,richlander/corefx,MaggieTsang/corefx,benpye/corefx,ellismg/corefx,mmitche/corefx,erpframework/corefx,ptoonen/corefx,parj
ong/corefx,jcme/corefx,shimingsg/corefx,chaitrakeshav/corefx,stephenmichaelf/corefx,twsouthwick/corefx,arronei/corefx,gabrielPeart/corefx,stormleoxia/corefx,shahid-pk/corefx,vs-team/corefx,pgavlin/corefx,Jiayili1/corefx,huanjie/corefx,thiagodin/corefx,parjong/corefx,janhenke/corefx,cnbin/corefx,vrassouli/corefx,KrisLee/corefx,kkurni/corefx,benjamin-bader/corefx,janhenke/corefx,chenkennt/corefx,Priya91/corefx-1,weltkante/corefx,krytarowski/corefx,rjxby/corefx,wtgodbe/corefx,manu-silicon/corefx,mokchhya/corefx,DnlHarvey/corefx,shmao/corefx,Petermarcu/corefx,matthubin/corefx,pallavit/corefx,krytarowski/corefx,jlin177/corefx,kkurni/corefx,CherryCxldn/corefx,scott156/corefx,krk/corefx,CherryCxldn/corefx,gkhanna79/corefx,destinyclown/corefx,larsbj1988/corefx,shahid-pk/corefx,josguil/corefx,SGuyGe/corefx,shana/corefx,rubo/corefx,mazong1123/corefx,mazong1123/corefx,comdiv/corefx,fernando-rodriguez/corefx,misterzik/corefx,ericstj/corefx,rahku/corefx,MaggieTsang/corefx,DnlHarvey/corefx,Petermarcu/corefx,jeremymeng/corefx,EverlessDrop41/corefx,manu-silicon/corefx,yizhang82/corefx,Chrisboh/corefx,mellinoe/corefx,benpye/corefx,mmitche/corefx,alphonsekurian/corefx,benjamin-bader/corefx,tstringer/corefx,krytarowski/corefx,Ermiar/corefx,alexandrnikitin/corefx,dkorolev/corefx,ViktorHofer/corefx,DnlHarvey/corefx,pallavit/corefx,the-dwyer/corefx,adamralph/corefx,cnbin/corefx,rahku/corefx,lggomez/corefx,bitcrazed/corefx,viniciustaveira/corefx,mafiya69/corefx,scott156/corefx,axelheer/corefx,seanshpark/corefx,Jiayili1/corefx,ptoonen/corefx,mellinoe/corefx,Ermiar/corefx,bitcrazed/corefx,SGuyGe/corefx,pgavlin/corefx,shiftkey-tester/corefx,andyhebear/corefx,comdiv/corefx,Jiayili1/corefx,cnbin/corefx,BrennanConroy/corefx,dhoehna/corefx,kkurni/corefx,Priya91/corefx-1,JosephTremoulet/corefx,the-dwyer/corefx,twsouthwick/corefx,JosephTremoulet/corefx,fgreinacher/corefx,YoupHulsebos/corefx,Yanjing123/corefx,MaggieTsang/corefx,alphonsekurian/corefx,khdang/corefx,bpschoch/corefx,adamralph/corefx,sh
mao/corefx,josguil/corefx,ViktorHofer/corefx,Winsto/corefx,lggomez/corefx,janhenke/corefx,weltkante/corefx,dsplaisted/corefx,rahku/corefx,mazong1123/corefx,spoiledsport/corefx,uhaciogullari/corefx,vidhya-bv/corefx-sorting,vs-team/corefx,PatrickMcDonald/corefx,rjxby/corefx,ellismg/corefx,rahku/corefx,zhenlan/corefx,uhaciogullari/corefx,axelheer/corefx,s0ne0me/corefx,tstringer/corefx,the-dwyer/corefx,twsouthwick/corefx,nbarbettini/corefx,mmitche/corefx,CherryCxldn/corefx,CherryCxldn/corefx,janhenke/corefx,oceanho/corefx,lggomez/corefx,krk/corefx,lydonchandra/corefx,krk/corefx,manu-silicon/corefx,n1ghtmare/corefx,gabrielPeart/corefx,DnlHarvey/corefx,Chrisboh/corefx,MaggieTsang/corefx,nbarbettini/corefx,thiagodin/corefx,dotnet-bot/corefx,bpschoch/corefx,chenxizhang/corefx,690486439/corefx,dkorolev/corefx,vijaykota/corefx,rjxby/corefx,zmaruo/corefx,FiveTimesTheFun/corefx,JosephTremoulet/corefx,nbarbettini/corefx,dotnet-bot/corefx,cartermp/corefx,bitcrazed/corefx,wtgodbe/corefx,elijah6/corefx,gregg-miskelly/corefx,vs-team/corefx,chaitrakeshav/corefx,jcme/corefx,mmitche/corefx,elijah6/corefx,PatrickMcDonald/corefx,vidhya-bv/corefx-sorting,CloudLens/corefx,krytarowski/corefx,gkhanna79/corefx,stormleoxia/corefx,billwert/corefx,jlin177/corefx,ptoonen/corefx,ViktorHofer/corefx,chenkennt/corefx,dtrebbien/corefx,kkurni/corefx,YoupHulsebos/corefx,billwert/corefx,ericstj/corefx,Priya91/corefx-1,mazong1123/corefx,krytarowski/corefx,dkorolev/corefx,alexperovich/corefx,axelheer/corefx,marksmeltzer/corefx,shimingsg/corefx,zhenlan/corefx,shimingsg/corefx,cnbin/corefx,kyulee1/corefx,dotnet-bot/corefx,Petermarcu/corefx,Chrisboh/corefx,nchikanov/corefx,zhenlan/corefx,twsouthwick/corefx,janhenke/corefx,tijoytom/corefx,shana/corefx,yizhang82/corefx,uhaciogullari/corefx,pallavit/corefx,rajansingh10/corefx,dhoehna/corefx,tijoytom/corefx,zhangwenquan/corefx,josguil/corefx,anjumrizwi/corefx,shana/corefx,fffej/corefx,stephenmichaelf/corefx,benpye/corefx,parjong/corefx,stone-li/corefx,MaggieTsang
/corefx,iamjasonp/corefx,ellismg/corefx,ravimeda/corefx,gregg-miskelly/corefx,jmhardison/corefx,ravimeda/corefx,scott156/corefx,rubo/corefx,alexperovich/corefx,mafiya69/corefx,contoso-dnf-a/corefx,jhendrixMSFT/corefx,yizhang82/corefx,ellismg/corefx,zmaruo/corefx,parjong/corefx,ravimeda/corefx,iamjasonp/corefx,n1ghtmare/corefx,shahid-pk/corefx,iamjasonp/corefx,alexperovich/corefx,anjumrizwi/corefx,chenxizhang/corefx,ericstj/corefx,jmhardison/corefx,alexandrnikitin/corefx | yaml | ## Code Before:
build_script: "\"%VS120COMNTOOLS%VsDevCmd.bat\" && build.cmd"
test: off # disable automatic test discovery, xUnit already runs as part of build.cmd
## Instruction:
Fix AppVeyor build script to correctly return error code
The script chained the execution of VsDevCmd.bat (aka the "Developer
Command Prompt") with build.cmd which seems to confuse AppVeyor
into not recognizing a non-zero exit code (i.e. when a test fails)
and incorrectly marking the build as green.
Invoking VsDevCmd.bat and build.cmd in two separate steps fixes this.
Fixes #214
## Code After:
build_script:
- '"%VS120COMNTOOLS%VsDevCmd.bat"'
- build.cmd
test: off # disable automatic test discovery, xUnit already runs as part of build.cmd | - build_script: "\"%VS120COMNTOOLS%VsDevCmd.bat\" && build.cmd"
+ build_script:
+ - '"%VS120COMNTOOLS%VsDevCmd.bat"'
+ - build.cmd
test: off # disable automatic test discovery, xUnit already runs as part of build.cmd | 4 | 2 | 3 | 1 |
beda03dc46139d03c112949308ff43147633dddb | cmake/OpenSyncTesting.cmake | cmake/OpenSyncTesting.cmake | ENABLE_TESTING()
INCLUDE( CTest )
MACRO( OPENSYNC_ADD_TEST _testName _testSource _targetLibraries )
ADD_EXECUTABLE( ${_testName} ${_testSource} )
TARGET_LINK_LIBRARIES( ${_testName} ${_targetLibraries} )
ADD_TEST( ${_testName} ${CMAKE_CURRENT_BINARY_DIR}/${_testName} )
ENDMACRO( OPENSYNC_ADD_TEST )
| ENABLE_TESTING()
INCLUDE( CTest )
MACRO( OPENSYNC_ADD_TEST _testName _testSource )
ADD_EXECUTABLE( ${_testName} ${_testSource} )
TARGET_LINK_LIBRARIES( ${_testName} ${ARGN} )
ADD_TEST( ${_testName} ${CMAKE_CURRENT_BINARY_DIR}/${_testName} )
ENDMACRO( OPENSYNC_ADD_TEST )
| Allow serveral target libraries as last argument. Unfortunately only one testcase sources file is allowed for now... | Allow serveral target libraries as last argument.
Unfortunately only one testcase sources file is allowed for now...
git-svn-id: e31799a7ad59d6ea355ca047c69e0aee8a59fcd3@2714 53f5c7ee-bee3-0310-bbc5-ea0e15fffd5e
| CMake | lgpl-2.1 | ianmartin/opensync,ianmartin/opensync | cmake | ## Code Before:
ENABLE_TESTING()
INCLUDE( CTest )
MACRO( OPENSYNC_ADD_TEST _testName _testSource _targetLibraries )
ADD_EXECUTABLE( ${_testName} ${_testSource} )
TARGET_LINK_LIBRARIES( ${_testName} ${_targetLibraries} )
ADD_TEST( ${_testName} ${CMAKE_CURRENT_BINARY_DIR}/${_testName} )
ENDMACRO( OPENSYNC_ADD_TEST )
## Instruction:
Allow serveral target libraries as last argument.
Unfortunately only one testcase sources file is allowed for now...
git-svn-id: e31799a7ad59d6ea355ca047c69e0aee8a59fcd3@2714 53f5c7ee-bee3-0310-bbc5-ea0e15fffd5e
## Code After:
ENABLE_TESTING()
INCLUDE( CTest )
MACRO( OPENSYNC_ADD_TEST _testName _testSource )
ADD_EXECUTABLE( ${_testName} ${_testSource} )
TARGET_LINK_LIBRARIES( ${_testName} ${ARGN} )
ADD_TEST( ${_testName} ${CMAKE_CURRENT_BINARY_DIR}/${_testName} )
ENDMACRO( OPENSYNC_ADD_TEST )
| ENABLE_TESTING()
INCLUDE( CTest )
- MACRO( OPENSYNC_ADD_TEST _testName _testSource _targetLibraries )
? -----------------
+ MACRO( OPENSYNC_ADD_TEST _testName _testSource )
ADD_EXECUTABLE( ${_testName} ${_testSource} )
- TARGET_LINK_LIBRARIES( ${_testName} ${_targetLibraries} )
? ^^^^^^^^^^^^^^^^
+ TARGET_LINK_LIBRARIES( ${_testName} ${ARGN} )
? ^^^^
ADD_TEST( ${_testName} ${CMAKE_CURRENT_BINARY_DIR}/${_testName} )
ENDMACRO( OPENSYNC_ADD_TEST ) | 4 | 0.4 | 2 | 2 |
e7000fa09fc25238e7f1e154908484a6fb490800 | entrypoint.sh | entrypoint.sh | file_env() {
local var="$1"
local fileVar="${var}_FILE"
local def="${2:-}"
if [ "${!var:-}" ] && [ "${!fileVar:-}" ]; then
echo >&2 "error: both $var and $fileVar are set (but are exclusive)"
exit 1
fi
local val="$def"
if [ "${!var:-}" ]; then
val="${!var}"
elif [ "${!fileVar:-}" ]; then
val="$(< "${!fileVar}")"
fi
export "$var"="$val"
unset "$fileVar"
}
# File envs
file_env 'GOOGLE_CLIENT_ID'
file_env 'GOOGLE_CLIENT_SECRET'
file_env 'SLACK_SECRET'
file_env 'POSTGRES_PASSWORD'
file_env 'MENTOR_GOOGLE_CALENDAR'
file_env 'GOOGLE_API_KEY'
# Start node api
npm run bootstrap;
npm start;
| file_env() {
local var="$1"
local fileVar="${var}_FILE"
local def="${2:-}"
if [ "${!var:-}" ] && [ "${!fileVar:-}" ]; then
echo >&2 "error: both $var and $fileVar are set (but are exclusive)"
exit 1
fi
local val="$def"
if [ "${!var:-}" ]; then
val="${!var}"
elif [ "${!fileVar:-}" ]; then
val="$(< "${!fileVar}")"
fi
export "$var"="$val"
unset "$fileVar"
}
# File envs
file_env 'GOOGLE_CLIENT_ID'
file_env 'GOOGLE_CLIENT_SECRET'
file_env 'SLACK_SECRET'
file_env 'POSTGRES_PASSWORD'
file_env 'MENTOR_GOOGLE_CALENDAR'
file_env 'GOOGLE_API_KEY'
file_env 'MAILGUN_DOMAIN_DEV'
file_env 'MAILGUN_SECRET_DEV'
file_env 'MAILGUN_DOMAIN'
file_env 'MAILGUN_SECRET'
# Start node api
npm run bootstrap;
npm start;
| Add Mailgun Environment Variables to Entrypoint | Add Mailgun Environment Variables to Entrypoint | Shell | mit | rit-sse/node-api,rit-sse/node-api,rit-sse/node-api | shell | ## Code Before:
file_env() {
local var="$1"
local fileVar="${var}_FILE"
local def="${2:-}"
if [ "${!var:-}" ] && [ "${!fileVar:-}" ]; then
echo >&2 "error: both $var and $fileVar are set (but are exclusive)"
exit 1
fi
local val="$def"
if [ "${!var:-}" ]; then
val="${!var}"
elif [ "${!fileVar:-}" ]; then
val="$(< "${!fileVar}")"
fi
export "$var"="$val"
unset "$fileVar"
}
# File envs
file_env 'GOOGLE_CLIENT_ID'
file_env 'GOOGLE_CLIENT_SECRET'
file_env 'SLACK_SECRET'
file_env 'POSTGRES_PASSWORD'
file_env 'MENTOR_GOOGLE_CALENDAR'
file_env 'GOOGLE_API_KEY'
# Start node api
npm run bootstrap;
npm start;
## Instruction:
Add Mailgun Environment Variables to Entrypoint
## Code After:
file_env() {
local var="$1"
local fileVar="${var}_FILE"
local def="${2:-}"
if [ "${!var:-}" ] && [ "${!fileVar:-}" ]; then
echo >&2 "error: both $var and $fileVar are set (but are exclusive)"
exit 1
fi
local val="$def"
if [ "${!var:-}" ]; then
val="${!var}"
elif [ "${!fileVar:-}" ]; then
val="$(< "${!fileVar}")"
fi
export "$var"="$val"
unset "$fileVar"
}
# File envs
file_env 'GOOGLE_CLIENT_ID'
file_env 'GOOGLE_CLIENT_SECRET'
file_env 'SLACK_SECRET'
file_env 'POSTGRES_PASSWORD'
file_env 'MENTOR_GOOGLE_CALENDAR'
file_env 'GOOGLE_API_KEY'
file_env 'MAILGUN_DOMAIN_DEV'
file_env 'MAILGUN_SECRET_DEV'
file_env 'MAILGUN_DOMAIN'
file_env 'MAILGUN_SECRET'
# Start node api
npm run bootstrap;
npm start;
| file_env() {
local var="$1"
local fileVar="${var}_FILE"
local def="${2:-}"
if [ "${!var:-}" ] && [ "${!fileVar:-}" ]; then
echo >&2 "error: both $var and $fileVar are set (but are exclusive)"
exit 1
fi
local val="$def"
if [ "${!var:-}" ]; then
val="${!var}"
elif [ "${!fileVar:-}" ]; then
val="$(< "${!fileVar}")"
fi
export "$var"="$val"
unset "$fileVar"
}
# File envs
file_env 'GOOGLE_CLIENT_ID'
file_env 'GOOGLE_CLIENT_SECRET'
file_env 'SLACK_SECRET'
file_env 'POSTGRES_PASSWORD'
file_env 'MENTOR_GOOGLE_CALENDAR'
file_env 'GOOGLE_API_KEY'
+ file_env 'MAILGUN_DOMAIN_DEV'
+ file_env 'MAILGUN_SECRET_DEV'
+ file_env 'MAILGUN_DOMAIN'
+ file_env 'MAILGUN_SECRET'
# Start node api
npm run bootstrap;
npm start; | 4 | 0.137931 | 4 | 0 |
03a6a9e6a2e50fcb5d8a425e7a6275f30e8e95f1 | src/modules/search/components/UnitSuggestions.js | src/modules/search/components/UnitSuggestions.js | import React from 'react';
import {Link} from 'react-router';
import ObservationStatus from '../../unit/components/ObservationStatus';
import {getAttr, getUnitIconURL, getServiceName, getObservation} from '../../unit/helpers';
const UnitSuggestion = ({unit, ...rest}) =>
<Link to={`/unit/${unit.id}`} className="search-suggestions__result" {...rest}>
<div className="search-suggestions__result-icon">
<img src={getUnitIconURL(unit)} alt={getServiceName(unit)} />
</div>
<div className="search-suggestions__result-details">
<div className="search-suggestions__result-details__name">{getAttr(unit.name)}</div>
<ObservationStatus observation={getObservation(unit)}/>
</div>
</Link>;
export default UnitSuggestion;
| import React from 'react';
import {Link} from 'react-router';
import ObservationStatus from '../../unit/components/ObservationStatus';
import {getUnitIconURL, getServiceName, getObservation} from '../../unit/helpers';
const UnitSuggestion = ({unit, ...rest}, context) =>
<Link to={`/unit/${unit.id}`} className="search-suggestions__result" {...rest}>
<div className="search-suggestions__result-icon">
<img src={getUnitIconURL(unit)} alt={getServiceName(unit)} />
</div>
<div className="search-suggestions__result-details">
<div className="search-suggestions__result-details__name">{context.getAttr(unit.name)}</div>
<ObservationStatus observation={getObservation(unit)}/>
</div>
</Link>;
UnitSuggestion.contextTypes = {
getAttr: React.PropTypes.func
};
export default UnitSuggestion;
| Fix translations for unit fields in search suggestions | Fix translations for unit fields in search suggestions
| JavaScript | mit | nordsoftware/outdoors-sports-map,nordsoftware/outdoors-sports-map,nordsoftware/outdoors-sports-map | javascript | ## Code Before:
import React from 'react';
import {Link} from 'react-router';
import ObservationStatus from '../../unit/components/ObservationStatus';
import {getAttr, getUnitIconURL, getServiceName, getObservation} from '../../unit/helpers';
const UnitSuggestion = ({unit, ...rest}) =>
<Link to={`/unit/${unit.id}`} className="search-suggestions__result" {...rest}>
<div className="search-suggestions__result-icon">
<img src={getUnitIconURL(unit)} alt={getServiceName(unit)} />
</div>
<div className="search-suggestions__result-details">
<div className="search-suggestions__result-details__name">{getAttr(unit.name)}</div>
<ObservationStatus observation={getObservation(unit)}/>
</div>
</Link>;
export default UnitSuggestion;
## Instruction:
Fix translations for unit fields in search suggestions
## Code After:
import React from 'react';
import {Link} from 'react-router';
import ObservationStatus from '../../unit/components/ObservationStatus';
import {getUnitIconURL, getServiceName, getObservation} from '../../unit/helpers';
const UnitSuggestion = ({unit, ...rest}, context) =>
<Link to={`/unit/${unit.id}`} className="search-suggestions__result" {...rest}>
<div className="search-suggestions__result-icon">
<img src={getUnitIconURL(unit)} alt={getServiceName(unit)} />
</div>
<div className="search-suggestions__result-details">
<div className="search-suggestions__result-details__name">{context.getAttr(unit.name)}</div>
<ObservationStatus observation={getObservation(unit)}/>
</div>
</Link>;
UnitSuggestion.contextTypes = {
getAttr: React.PropTypes.func
};
export default UnitSuggestion;
| import React from 'react';
import {Link} from 'react-router';
import ObservationStatus from '../../unit/components/ObservationStatus';
- import {getAttr, getUnitIconURL, getServiceName, getObservation} from '../../unit/helpers';
? ---------
+ import {getUnitIconURL, getServiceName, getObservation} from '../../unit/helpers';
- const UnitSuggestion = ({unit, ...rest}) =>
+ const UnitSuggestion = ({unit, ...rest}, context) =>
? +++++++++
<Link to={`/unit/${unit.id}`} className="search-suggestions__result" {...rest}>
<div className="search-suggestions__result-icon">
<img src={getUnitIconURL(unit)} alt={getServiceName(unit)} />
</div>
<div className="search-suggestions__result-details">
- <div className="search-suggestions__result-details__name">{getAttr(unit.name)}</div>
+ <div className="search-suggestions__result-details__name">{context.getAttr(unit.name)}</div>
? ++++++++
<ObservationStatus observation={getObservation(unit)}/>
</div>
</Link>;
+ UnitSuggestion.contextTypes = {
+ getAttr: React.PropTypes.func
+ };
+
export default UnitSuggestion; | 10 | 0.588235 | 7 | 3 |
6ccd9722a6db66666a9400caf7d124c5ac25ab08 | post_pizza_slices.py | post_pizza_slices.py | import boto3, json
sdb = boto3.client('sdb')
def lambda_handler(data, context):
"""
Handler for posting data to SimpleDB.
Args:
data -- Data to be stored (Dictionary).
context -- AWS context for the request (Object).
"""
if data['Password'] and data['Password'] == 'INSERT PASSWORD':
try:
for person in ['Sharon', 'Ryan']:
old_slice_count = int(
sdb.get_attributes(
DomainName='pizza',
ItemName=person,
AttributeNames=['Slices']
)['Attributes'][0]['Value']
)
new_slice_count = old_slice_count + data[person]
sdb.put_attributes(
DomainName='pizza',
ItemName=person,
Attributes=[{
'Name': 'Slices',
'Value': str(new_slice_count),
'Replace': True
}]
)
return json.dumps({'Success': 'Your data was submitted!'})
except:
return json.dumps({'Error': 'Your data was not submitted.'})
else:
return json.dumps({
'Error': 'Ah ah ah, you didn\'t say the magic word.'
})
| import boto3
sdb = boto3.client('sdb')
def lambda_handler(data, context):
"""
Handler for posting data to SimpleDB.
Args:
data -- Data to be stored (Dictionary).
context -- AWS context for the request (Object).
"""
if data['Password'] and data['Password'] == 'INSERT PASSWORD':
try:
for person in ['Sharon', 'Ryan']:
old_slice_count = int(
sdb.get_attributes(
DomainName='pizza',
ItemName=person,
AttributeNames=['Slices']
)['Attributes'][0]['Value']
)
new_slice_count = old_slice_count + data[person]
sdb.put_attributes(
DomainName='pizza',
ItemName=person,
Attributes=[{
'Name': 'Slices',
'Value': str(new_slice_count),
'Replace': True
}]
)
return {'Success': 'Your data was submitted!'}
except:
return {'Error': 'Your data was not submitted.'}
else:
return {'Error': 'Ah ah ah, you didn\'t say the magic word.'}
| Remove JSON dependency in POST logic. | Remove JSON dependency in POST logic.
| Python | mit | ryandasher/pizza-tracker,ryandasher/pizza-tracker,ryandasher/pizza-tracker,ryandasher/pizza-tracker | python | ## Code Before:
import boto3, json
sdb = boto3.client('sdb')
def lambda_handler(data, context):
"""
Handler for posting data to SimpleDB.
Args:
data -- Data to be stored (Dictionary).
context -- AWS context for the request (Object).
"""
if data['Password'] and data['Password'] == 'INSERT PASSWORD':
try:
for person in ['Sharon', 'Ryan']:
old_slice_count = int(
sdb.get_attributes(
DomainName='pizza',
ItemName=person,
AttributeNames=['Slices']
)['Attributes'][0]['Value']
)
new_slice_count = old_slice_count + data[person]
sdb.put_attributes(
DomainName='pizza',
ItemName=person,
Attributes=[{
'Name': 'Slices',
'Value': str(new_slice_count),
'Replace': True
}]
)
return json.dumps({'Success': 'Your data was submitted!'})
except:
return json.dumps({'Error': 'Your data was not submitted.'})
else:
return json.dumps({
'Error': 'Ah ah ah, you didn\'t say the magic word.'
})
## Instruction:
Remove JSON dependency in POST logic.
## Code After:
import boto3
sdb = boto3.client('sdb')
def lambda_handler(data, context):
"""
Handler for posting data to SimpleDB.
Args:
data -- Data to be stored (Dictionary).
context -- AWS context for the request (Object).
"""
if data['Password'] and data['Password'] == 'INSERT PASSWORD':
try:
for person in ['Sharon', 'Ryan']:
old_slice_count = int(
sdb.get_attributes(
DomainName='pizza',
ItemName=person,
AttributeNames=['Slices']
)['Attributes'][0]['Value']
)
new_slice_count = old_slice_count + data[person]
sdb.put_attributes(
DomainName='pizza',
ItemName=person,
Attributes=[{
'Name': 'Slices',
'Value': str(new_slice_count),
'Replace': True
}]
)
return {'Success': 'Your data was submitted!'}
except:
return {'Error': 'Your data was not submitted.'}
else:
return {'Error': 'Ah ah ah, you didn\'t say the magic word.'}
| - import boto3, json
? ------
+ import boto3
sdb = boto3.client('sdb')
def lambda_handler(data, context):
"""
Handler for posting data to SimpleDB.
Args:
data -- Data to be stored (Dictionary).
context -- AWS context for the request (Object).
"""
if data['Password'] and data['Password'] == 'INSERT PASSWORD':
try:
for person in ['Sharon', 'Ryan']:
old_slice_count = int(
sdb.get_attributes(
DomainName='pizza',
ItemName=person,
AttributeNames=['Slices']
)['Attributes'][0]['Value']
)
new_slice_count = old_slice_count + data[person]
sdb.put_attributes(
DomainName='pizza',
ItemName=person,
Attributes=[{
'Name': 'Slices',
'Value': str(new_slice_count),
'Replace': True
}]
)
- return json.dumps({'Success': 'Your data was submitted!'})
? ----------- -
+ return {'Success': 'Your data was submitted!'}
except:
- return json.dumps({'Error': 'Your data was not submitted.'})
? ----------- -
+ return {'Error': 'Your data was not submitted.'}
else:
- return json.dumps({
- 'Error': 'Ah ah ah, you didn\'t say the magic word.'
? ^
+ return {'Error': 'Ah ah ah, you didn\'t say the magic word.'}
? ^^^^^^^^ +
- }) | 10 | 0.238095 | 4 | 6 |
836c4529f4d293edd9dec25666b7012882bce0f3 | .travis.yml | .travis.yml | language: ruby
script: "bundle exec rake run"
rvm:
- 1.9.3
gemfile:
- .travis/Gemfile.redis-gem-3.0
- .travis/Gemfile.redis-gem-master
env:
global:
- TIMEOUT=1
matrix:
- REDIS_BRANCH=2.4
- REDIS_BRANCH=2.6
- REDIS_BRANCH=2.8
- REDIS_BRANCH=unstable
| language: ruby
script: "bundle exec rake run"
rvm:
- 1.9.3
- 2.0.0
gemfile:
- .travis/Gemfile.redis-gem-3.0
- .travis/Gemfile.redis-gem-master
env:
global:
- TIMEOUT=1
matrix:
- REDIS_BRANCH=2.4
- REDIS_BRANCH=2.6
- REDIS_BRANCH=2.8
- REDIS_BRANCH=unstable
| Add Ruby 2.0.0 to the Travis testing matrix | Add Ruby 2.0.0 to the Travis testing matrix
| YAML | mit | yaauie/redis-copy | yaml | ## Code Before:
language: ruby
script: "bundle exec rake run"
rvm:
- 1.9.3
gemfile:
- .travis/Gemfile.redis-gem-3.0
- .travis/Gemfile.redis-gem-master
env:
global:
- TIMEOUT=1
matrix:
- REDIS_BRANCH=2.4
- REDIS_BRANCH=2.6
- REDIS_BRANCH=2.8
- REDIS_BRANCH=unstable
## Instruction:
Add Ruby 2.0.0 to the Travis testing matrix
## Code After:
language: ruby
script: "bundle exec rake run"
rvm:
- 1.9.3
- 2.0.0
gemfile:
- .travis/Gemfile.redis-gem-3.0
- .travis/Gemfile.redis-gem-master
env:
global:
- TIMEOUT=1
matrix:
- REDIS_BRANCH=2.4
- REDIS_BRANCH=2.6
- REDIS_BRANCH=2.8
- REDIS_BRANCH=unstable
| language: ruby
script: "bundle exec rake run"
rvm:
- 1.9.3
+ - 2.0.0
gemfile:
- .travis/Gemfile.redis-gem-3.0
- .travis/Gemfile.redis-gem-master
env:
global:
- TIMEOUT=1
matrix:
- REDIS_BRANCH=2.4
- REDIS_BRANCH=2.6
- REDIS_BRANCH=2.8
- REDIS_BRANCH=unstable | 1 | 0.0625 | 1 | 0 |
c003d92db170d3190de34953d72a3690f4663231 | conf/evolutions/default/62.sql | conf/evolutions/default/62.sql |
ALTER TABLE "party" DROP "duns";
ALTER TABLE audit."party" DROP "duns";
# --- !Downs
ALTER TABLE "party" ADD "duns" numeric(9);
ALTER TABLE "party" RENAME "url" TO "url_old";
ALTER TABLE "party" ADD "url" text;
UPDATE "party" SET url = url_old;
ALTER TABLE "party" DROP "url_old";
COMMENT ON COLUMN "party"."duns" IS 'http://en.wikipedia.org/wiki/DUNS';
ALTER TABLE audit."party" ADD "duns" numeric(9);
ALTER TABLE audit."party" RENAME "url" TO "url_old";
ALTER TABLE audit."party" ADD "url" text;
DO $do$ BEGIN
EXECUTE $$GRANT UPDATE ON TABLE audit.party TO $$ || quote_ident(current_user);;
UPDATE audit.party SET url = url_old;;
EXECUTE $$REVOKE UPDATE ON TABLE audit.party FROM $$ || quote_ident(current_user);;
END;; $do$;
ALTER TABLE audit."party" DROP "url_old";
|
ALTER TABLE "party" DROP "duns";
ALTER TABLE audit."party" DROP "duns";
# --- !Downs
ALTER TABLE "party" ADD "duns" numeric(9);
ALTER TABLE "party" RENAME "url" TO "url_old";
ALTER TABLE "party" ADD "url" text;
UPDATE "party" SET url = url_old;
ALTER TABLE "party" DROP "url_old";
COMMENT ON COLUMN "party"."duns" IS 'http://en.wikipedia.org/wiki/DUNS';
UPDATE party SET duns = 0 WHERE id > 0 AND NOT EXISTS (SELECT * FROM account WHERE account.id = party.id);
ALTER TABLE audit."party" ADD "duns" numeric(9);
ALTER TABLE audit."party" RENAME "url" TO "url_old";
ALTER TABLE audit."party" ADD "url" text;
DO $do$ BEGIN
EXECUTE $$GRANT UPDATE ON TABLE audit.party TO $$ || quote_ident(current_user);;
UPDATE audit.party SET url = url_old;;
EXECUTE $$REVOKE UPDATE ON TABLE audit.party FROM $$ || quote_ident(current_user);;
END;; $do$;
ALTER TABLE audit."party" DROP "url_old";
| Reset duns on down evolution | Reset duns on down evolution
Evolution invalidation. Duns values are lost, this just keeps
existence.
| SQL | agpl-3.0 | databrary/databrary,databrary/databrary,databrary/databrary,databrary/databrary | sql | ## Code Before:
ALTER TABLE "party" DROP "duns";
ALTER TABLE audit."party" DROP "duns";
# --- !Downs
ALTER TABLE "party" ADD "duns" numeric(9);
ALTER TABLE "party" RENAME "url" TO "url_old";
ALTER TABLE "party" ADD "url" text;
UPDATE "party" SET url = url_old;
ALTER TABLE "party" DROP "url_old";
COMMENT ON COLUMN "party"."duns" IS 'http://en.wikipedia.org/wiki/DUNS';
ALTER TABLE audit."party" ADD "duns" numeric(9);
ALTER TABLE audit."party" RENAME "url" TO "url_old";
ALTER TABLE audit."party" ADD "url" text;
DO $do$ BEGIN
EXECUTE $$GRANT UPDATE ON TABLE audit.party TO $$ || quote_ident(current_user);;
UPDATE audit.party SET url = url_old;;
EXECUTE $$REVOKE UPDATE ON TABLE audit.party FROM $$ || quote_ident(current_user);;
END;; $do$;
ALTER TABLE audit."party" DROP "url_old";
## Instruction:
Reset duns on down evolution
Evolution invalidation. Duns values are lost, this just keeps
existence.
## Code After:
ALTER TABLE "party" DROP "duns";
ALTER TABLE audit."party" DROP "duns";
# --- !Downs
ALTER TABLE "party" ADD "duns" numeric(9);
ALTER TABLE "party" RENAME "url" TO "url_old";
ALTER TABLE "party" ADD "url" text;
UPDATE "party" SET url = url_old;
ALTER TABLE "party" DROP "url_old";
COMMENT ON COLUMN "party"."duns" IS 'http://en.wikipedia.org/wiki/DUNS';
UPDATE party SET duns = 0 WHERE id > 0 AND NOT EXISTS (SELECT * FROM account WHERE account.id = party.id);
ALTER TABLE audit."party" ADD "duns" numeric(9);
ALTER TABLE audit."party" RENAME "url" TO "url_old";
ALTER TABLE audit."party" ADD "url" text;
DO $do$ BEGIN
EXECUTE $$GRANT UPDATE ON TABLE audit.party TO $$ || quote_ident(current_user);;
UPDATE audit.party SET url = url_old;;
EXECUTE $$REVOKE UPDATE ON TABLE audit.party FROM $$ || quote_ident(current_user);;
END;; $do$;
ALTER TABLE audit."party" DROP "url_old";
|
ALTER TABLE "party" DROP "duns";
ALTER TABLE audit."party" DROP "duns";
# --- !Downs
ALTER TABLE "party" ADD "duns" numeric(9);
ALTER TABLE "party" RENAME "url" TO "url_old";
ALTER TABLE "party" ADD "url" text;
UPDATE "party" SET url = url_old;
ALTER TABLE "party" DROP "url_old";
COMMENT ON COLUMN "party"."duns" IS 'http://en.wikipedia.org/wiki/DUNS';
+ UPDATE party SET duns = 0 WHERE id > 0 AND NOT EXISTS (SELECT * FROM account WHERE account.id = party.id);
+
ALTER TABLE audit."party" ADD "duns" numeric(9);
ALTER TABLE audit."party" RENAME "url" TO "url_old";
ALTER TABLE audit."party" ADD "url" text;
DO $do$ BEGIN
EXECUTE $$GRANT UPDATE ON TABLE audit.party TO $$ || quote_ident(current_user);;
UPDATE audit.party SET url = url_old;;
EXECUTE $$REVOKE UPDATE ON TABLE audit.party FROM $$ || quote_ident(current_user);;
END;; $do$;
ALTER TABLE audit."party" DROP "url_old";
| 2 | 0.083333 | 2 | 0 |
9f6e93f654126af110072d86cb924cebb29445f3 | README.md | README.md | delayed_job Cookbook
====================
Installs and configures delayed_job and monit config for DJ workers.
Requirements
------------
See metadata
Attributes
----------
See attributes
Usage
-----
#### delayed_job::default
Just include `delayed_job` in your node's `run_list`:
```json
{
"name":"my_node",
"run_list": [
"recipe[delayed_job]"
]
}
```
Contributing
------------
1. Fork the repository on Github
2. Create a named feature branch (like `add_component_x`)
3. Write you change
4. Write tests for your change (if applicable)
5. Run the tests, ensuring they all pass
6. Submit a Pull Request using Github
| delayed_job Cookbook
====================
Installs and configures delayed_job and monit config for DJ workers.
Requirements
------------
See metadata
Attributes
----------
See attributes
Usage
-----
#### delayed_job::default
Just include `delayed_job` in your node's `run_list`:
```json
{
"name":"my_node",
"run_list": [
"recipe[delayed_job]"
]
}
```
Contributing
------------
1. Fork the repository on Github
2. Create a named feature branch (like `add_component_x`)
3. Write you change
4. Write tests for your change (if applicable)
5. Run the tests, ensuring they all pass
6. Submit a Pull Request using Github
Brought to you by
-----------------
[](http://BigBinary.com)
| Add Brought to you by | Add Brought to you by
| Markdown | mit | bigbinary/boxy-delayed-job,bigbinary/boxy-delayed-job | markdown | ## Code Before:
delayed_job Cookbook
====================
Installs and configures delayed_job and monit config for DJ workers.
Requirements
------------
See metadata
Attributes
----------
See attributes
Usage
-----
#### delayed_job::default
Just include `delayed_job` in your node's `run_list`:
```json
{
"name":"my_node",
"run_list": [
"recipe[delayed_job]"
]
}
```
Contributing
------------
1. Fork the repository on Github
2. Create a named feature branch (like `add_component_x`)
3. Write you change
4. Write tests for your change (if applicable)
5. Run the tests, ensuring they all pass
6. Submit a Pull Request using Github
## Instruction:
Add Brought to you by
## Code After:
delayed_job Cookbook
====================
Installs and configures delayed_job and monit config for DJ workers.
Requirements
------------
See metadata
Attributes
----------
See attributes
Usage
-----
#### delayed_job::default
Just include `delayed_job` in your node's `run_list`:
```json
{
"name":"my_node",
"run_list": [
"recipe[delayed_job]"
]
}
```
Contributing
------------
1. Fork the repository on Github
2. Create a named feature branch (like `add_component_x`)
3. Write you change
4. Write tests for your change (if applicable)
5. Run the tests, ensuring they all pass
6. Submit a Pull Request using Github
Brought to you by
-----------------
[](http://BigBinary.com)
| delayed_job Cookbook
====================
Installs and configures delayed_job and monit config for DJ workers.
Requirements
------------
See metadata
Attributes
----------
See attributes
Usage
-----
#### delayed_job::default
Just include `delayed_job` in your node's `run_list`:
```json
{
"name":"my_node",
"run_list": [
"recipe[delayed_job]"
]
}
```
Contributing
------------
1. Fork the repository on Github
2. Create a named feature branch (like `add_component_x`)
3. Write you change
4. Write tests for your change (if applicable)
5. Run the tests, ensuring they all pass
6. Submit a Pull Request using Github
+
+ Brought to you by
+ -----------------
+
+ [](http://BigBinary.com) | 5 | 0.138889 | 5 | 0 |
2e21f02bff991c16c660504beb85957d5b2c872f | src/Rogue/View.elm | src/Rogue/View.elm | module Rogue.View where
import Color exposing (..)
import Graphics.Collage exposing (..)
import Graphics.Element exposing (..)
import String exposing (join)
import Text
import Array exposing (..)
import Rogue.Model exposing (..)
view : Game -> Element
view g = leftAligned (Text.monospace (Text.fromString (toString g.gameMap)))
toString : GameMap -> String
toString {board,start,currentPlayerLocation} =
let
rowifier =
(\row ->
Array.map
(\cell ->
if | isAt currentPlayerLocation cell -> "@"
| not <| isOpen cell -> "#"
| otherwise -> "."
)
row
|> toList
|> join ""
)
in
Array.map rowifier board
|> toList
|> join "\n" | module Rogue.View where
import Color exposing (..)
import Graphics.Collage exposing (..)
import Graphics.Element exposing (..)
import String exposing (join)
import Text
import Array exposing (..)
import Rogue.Model exposing (..)
view : Game -> Element
view g = viewGameMap g.gameMap
viewGameMap : GameMap -> Element
viewGameMap {board,start,currentPlayerLocation} =
let
rowifier =
(\row ->
Array.map
(\cell ->
if | isAt currentPlayerLocation cell -> person
| not <| isOpen cell -> barrier
| otherwise -> open
)
row
|> toList
|> flow right
)
in
Array.map rowifier board
|> toList
|> flow down
txt str =
Text.fromString str
|> Text.monospace
|> centered
person =
txt "@"
|> standardize
barrier =
square 16
|> filled red
|> (\sq -> collage 16 16 [sq])
|> standardize
open =
txt "."
|> standardize
standardize : Element -> Element
standardize el =
el
|> container 16 16 middle
--toString =
-- let
-- rowifier =
-- (\row ->
-- Array.map
-- (\cell ->
-- if | isAt currentPlayerLocation cell -> "@"
-- | not <| isOpen cell -> "#"
-- | otherwise -> "."
-- )
-- row
-- |> toList
-- |> join ""
-- )
-- in
-- Array.map rowifier board
-- |> toList
-- |> join "\n" | Change view code to use elements | Change view code to use elements
| Elm | mit | chendrix/elm-rogue,chendrix/elm-rogue,chendrix/elm-rogue | elm | ## Code Before:
module Rogue.View where
import Color exposing (..)
import Graphics.Collage exposing (..)
import Graphics.Element exposing (..)
import String exposing (join)
import Text
import Array exposing (..)
import Rogue.Model exposing (..)
view : Game -> Element
view g = leftAligned (Text.monospace (Text.fromString (toString g.gameMap)))
toString : GameMap -> String
toString {board,start,currentPlayerLocation} =
let
rowifier =
(\row ->
Array.map
(\cell ->
if | isAt currentPlayerLocation cell -> "@"
| not <| isOpen cell -> "#"
| otherwise -> "."
)
row
|> toList
|> join ""
)
in
Array.map rowifier board
|> toList
|> join "\n"
## Instruction:
Change view code to use elements
## Code After:
module Rogue.View where
import Color exposing (..)
import Graphics.Collage exposing (..)
import Graphics.Element exposing (..)
import String exposing (join)
import Text
import Array exposing (..)
import Rogue.Model exposing (..)
view : Game -> Element
view g = viewGameMap g.gameMap
viewGameMap : GameMap -> Element
viewGameMap {board,start,currentPlayerLocation} =
let
rowifier =
(\row ->
Array.map
(\cell ->
if | isAt currentPlayerLocation cell -> person
| not <| isOpen cell -> barrier
| otherwise -> open
)
row
|> toList
|> flow right
)
in
Array.map rowifier board
|> toList
|> flow down
txt str =
Text.fromString str
|> Text.monospace
|> centered
person =
txt "@"
|> standardize
barrier =
square 16
|> filled red
|> (\sq -> collage 16 16 [sq])
|> standardize
open =
txt "."
|> standardize
standardize : Element -> Element
standardize el =
el
|> container 16 16 middle
--toString =
-- let
-- rowifier =
-- (\row ->
-- Array.map
-- (\cell ->
-- if | isAt currentPlayerLocation cell -> "@"
-- | not <| isOpen cell -> "#"
-- | otherwise -> "."
-- )
-- row
-- |> toList
-- |> join ""
-- )
-- in
-- Array.map rowifier board
-- |> toList
-- |> join "\n" | module Rogue.View where
import Color exposing (..)
import Graphics.Collage exposing (..)
import Graphics.Element exposing (..)
import String exposing (join)
import Text
import Array exposing (..)
import Rogue.Model exposing (..)
view : Game -> Element
- view g = leftAligned (Text.monospace (Text.fromString (toString g.gameMap)))
+ view g = viewGameMap g.gameMap
- toString : GameMap -> String
+ viewGameMap : GameMap -> Element
- toString {board,start,currentPlayerLocation} =
? ^^^^^ ^^
+ viewGameMap {board,start,currentPlayerLocation} =
? ^ ^^^^^^^^^
let
rowifier =
(\row ->
Array.map
(\cell ->
- if | isAt currentPlayerLocation cell -> "@"
? ^^^
+ if | isAt currentPlayerLocation cell -> person
? ^^^^^^
- | not <| isOpen cell -> "#"
? ^^^
+ | not <| isOpen cell -> barrier
? ^^^^^^^
- | otherwise -> "."
? ^^^
+ | otherwise -> open
? ^^^^
)
row
|> toList
- |> join ""
+ |> flow right
)
in
Array.map rowifier board
|> toList
+ |> flow down
+
+
+ txt str =
+ Text.fromString str
+ |> Text.monospace
+ |> centered
+
+ person =
+ txt "@"
+ |> standardize
+
+ barrier =
+ square 16
+ |> filled red
+ |> (\sq -> collage 16 16 [sq])
+ |> standardize
+
+ open =
+ txt "."
+ |> standardize
+
+ standardize : Element -> Element
+ standardize el =
+ el
+ |> container 16 16 middle
+
+ --toString =
+ -- let
+ -- rowifier =
+ -- (\row ->
+ -- Array.map
+ -- (\cell ->
+ -- if | isAt currentPlayerLocation cell -> "@"
+ -- | not <| isOpen cell -> "#"
+ -- | otherwise -> "."
+ -- )
+ -- row
+ -- |> toList
+ -- |> join ""
+ -- )
+ -- in
+ -- Array.map rowifier board
+ -- |> toList
- |> join "\n"
+ -- |> join "\n"
? ++
| 60 | 1.818182 | 52 | 8 |
f333ec24ca83dc9246fede80e9aae7f6b1bb6f8b | requirements.dev.txt | requirements.dev.txt | coverage==4.0.3
django-nose==1.4.4
flake8==2.5.4
ipdb==0.9.0
ipython==4.1.2
nose==1.3.7
wheel==0.29.0
# tox
virtualenv==14.0.6
py==1.4.31
tox==2.3.1
# Sphinx
MarkupSafe==0.23
Jinja2==2.8
Pygments==2.1.2
Sphinx==1.3.6
# qa-all
check-manifest==0.31
pyroma==2.0.0
# Google tools - not Python3 compat
# transitfeed==1.2.15
| coverage==4.0.3
django-nose==1.4.4
flake8==2.5.4
ipdb==0.9.0
ipython==4.1.2
nose==1.3.7
wheel==0.29.0
twine==1.9.1
# tox
virtualenv==14.0.6
py==1.4.31
tox==2.3.1
# Sphinx
MarkupSafe==0.23
Jinja2==2.8
Pygments==2.1.2
Sphinx==1.3.6
# qa-all
check-manifest==0.35
pyroma==2.2
# Google tools - not Python3 compat
# transitfeed==1.2.15
| Update QA tools, add twine | Update QA tools, add twine
twine is a tool for uploading packages to PyPI
| Text | apache-2.0 | tulsawebdevs/django-multi-gtfs,tulsawebdevs/django-multi-gtfs | text | ## Code Before:
coverage==4.0.3
django-nose==1.4.4
flake8==2.5.4
ipdb==0.9.0
ipython==4.1.2
nose==1.3.7
wheel==0.29.0
# tox
virtualenv==14.0.6
py==1.4.31
tox==2.3.1
# Sphinx
MarkupSafe==0.23
Jinja2==2.8
Pygments==2.1.2
Sphinx==1.3.6
# qa-all
check-manifest==0.31
pyroma==2.0.0
# Google tools - not Python3 compat
# transitfeed==1.2.15
## Instruction:
Update QA tools, add twine
twine is a tool for uploading packages to PyPI
## Code After:
coverage==4.0.3
django-nose==1.4.4
flake8==2.5.4
ipdb==0.9.0
ipython==4.1.2
nose==1.3.7
wheel==0.29.0
twine==1.9.1
# tox
virtualenv==14.0.6
py==1.4.31
tox==2.3.1
# Sphinx
MarkupSafe==0.23
Jinja2==2.8
Pygments==2.1.2
Sphinx==1.3.6
# qa-all
check-manifest==0.35
pyroma==2.2
# Google tools - not Python3 compat
# transitfeed==1.2.15
| coverage==4.0.3
django-nose==1.4.4
flake8==2.5.4
ipdb==0.9.0
ipython==4.1.2
nose==1.3.7
wheel==0.29.0
+ twine==1.9.1
# tox
virtualenv==14.0.6
py==1.4.31
tox==2.3.1
# Sphinx
MarkupSafe==0.23
Jinja2==2.8
Pygments==2.1.2
Sphinx==1.3.6
# qa-all
- check-manifest==0.31
? ^
+ check-manifest==0.35
? ^
- pyroma==2.0.0
? ^^^
+ pyroma==2.2
? ^
# Google tools - not Python3 compat
# transitfeed==1.2.15 | 5 | 0.2 | 3 | 2 |
f1c91245a9e422f90594003c244cb6503c977281 | appveyor.yml | appveyor.yml | version: {build}
#---------------------------------#
# environment configuration #
#---------------------------------#
# Operating system (build VM template)
os: Windows Server 2012 R2
# clone directory
clone_folder: c:\projects\go-plus
# environment variables
environment:
GOPATH: c:\gopath
PATH: %PATH%;c:\gopath\bin
# scripts that run after cloning repository
install:
- cmd: mkdir c:\gopath
- cmd: go get github.com/golang/lint/golint
- cmd: go get code.google.com/p/go.tools/cmd/cover
- cmd: go get code.google.com/p/go.tools/cmd/goimports
- cmd: go get code.google.com/p/go.tools/cmd/vet
- cmd: go get code.google.com/p/go.tools/cmd/godoc
- cmd: cinst nodejs
- cmd: cinst atom
#---------------------------------#
# build configuration #
#---------------------------------#
# to run your custom scripts instead of automatic MSBuild
build_script:
- cmd: c:\projects\go-plus
- cmd: apm test
#---------------------------------#
# deployment configuration #
#---------------------------------#
# to disable deployment
deploy: off
| version: {build}
#---------------------------------#
# environment configuration #
#---------------------------------#
# Operating system (build VM template)
os: Windows Server 2012 R2
# clone directory
clone_folder: c:\projects\go-plus
# environment variables
environment:
GOPATH: c:\gopath
PATH: %PATH%;c:\gopath\bin
# scripts that run after cloning repository
install:
- cmd: mkdir c:\gopath
- cmd: go get github.com/golang/lint/golint
- cmd: go get code.google.com/p/go.tools/cmd/cover
- cmd: go get code.google.com/p/go.tools/cmd/goimports
- cmd: go get code.google.com/p/go.tools/cmd/vet
- cmd: go get code.google.com/p/go.tools/cmd/godoc
- cmd: cinst nodejs
- cmd: cinst atom
#---------------------------------#
# build configuration #
#---------------------------------#
# to run your custom scripts instead of automatic MSBuild
build_script: apm test
#---------------------------------#
# deployment configuration #
#---------------------------------#
# to disable deployment
deploy: off
| Fix Potential Bad build_script Format | Fix Potential Bad build_script Format
| YAML | apache-2.0 | danielchatfield/go-plus,alex/go-plus,rengawm/go-plus,ma314smith/go-plus,timesking/go-plus,crispinb/go-plus,alecthomas/go-plus,akutz/go-plus,alex/go-plus,crispinb/go-plus,rodumani/go-plus,Gurpartap/go-plus,greensnark/go-plus | yaml | ## Code Before:
version: {build}
#---------------------------------#
# environment configuration #
#---------------------------------#
# Operating system (build VM template)
os: Windows Server 2012 R2
# clone directory
clone_folder: c:\projects\go-plus
# environment variables
environment:
GOPATH: c:\gopath
PATH: %PATH%;c:\gopath\bin
# scripts that run after cloning repository
install:
- cmd: mkdir c:\gopath
- cmd: go get github.com/golang/lint/golint
- cmd: go get code.google.com/p/go.tools/cmd/cover
- cmd: go get code.google.com/p/go.tools/cmd/goimports
- cmd: go get code.google.com/p/go.tools/cmd/vet
- cmd: go get code.google.com/p/go.tools/cmd/godoc
- cmd: cinst nodejs
- cmd: cinst atom
#---------------------------------#
# build configuration #
#---------------------------------#
# to run your custom scripts instead of automatic MSBuild
build_script:
- cmd: c:\projects\go-plus
- cmd: apm test
#---------------------------------#
# deployment configuration #
#---------------------------------#
# to disable deployment
deploy: off
## Instruction:
Fix Potential Bad build_script Format
## Code After:
version: {build}
#---------------------------------#
# environment configuration #
#---------------------------------#
# Operating system (build VM template)
os: Windows Server 2012 R2
# clone directory
clone_folder: c:\projects\go-plus
# environment variables
environment:
GOPATH: c:\gopath
PATH: %PATH%;c:\gopath\bin
# scripts that run after cloning repository
install:
- cmd: mkdir c:\gopath
- cmd: go get github.com/golang/lint/golint
- cmd: go get code.google.com/p/go.tools/cmd/cover
- cmd: go get code.google.com/p/go.tools/cmd/goimports
- cmd: go get code.google.com/p/go.tools/cmd/vet
- cmd: go get code.google.com/p/go.tools/cmd/godoc
- cmd: cinst nodejs
- cmd: cinst atom
#---------------------------------#
# build configuration #
#---------------------------------#
# to run your custom scripts instead of automatic MSBuild
build_script: apm test
#---------------------------------#
# deployment configuration #
#---------------------------------#
# to disable deployment
deploy: off
| version: {build}
#---------------------------------#
# environment configuration #
#---------------------------------#
# Operating system (build VM template)
os: Windows Server 2012 R2
# clone directory
clone_folder: c:\projects\go-plus
# environment variables
environment:
GOPATH: c:\gopath
PATH: %PATH%;c:\gopath\bin
# scripts that run after cloning repository
install:
- cmd: mkdir c:\gopath
- cmd: go get github.com/golang/lint/golint
- cmd: go get code.google.com/p/go.tools/cmd/cover
- cmd: go get code.google.com/p/go.tools/cmd/goimports
- cmd: go get code.google.com/p/go.tools/cmd/vet
- cmd: go get code.google.com/p/go.tools/cmd/godoc
- cmd: cinst nodejs
- cmd: cinst atom
#---------------------------------#
# build configuration #
#---------------------------------#
# to run your custom scripts instead of automatic MSBuild
+ build_script: apm test
- build_script:
- - cmd: c:\projects\go-plus
- - cmd: apm test
#---------------------------------#
# deployment configuration #
#---------------------------------#
# to disable deployment
deploy: off | 4 | 0.093023 | 1 | 3 |
426058126a0a6e1f089a146424e1f9cd28d98293 | provisioning/roles/plugins/defaults/main.yml | provisioning/roles/plugins/defaults/main.yml | ---
plugin_dependencies:
- python-reportlab
pip_plugins:
# - "omero-webtagging-autotag"
- "omero-iviewer"
- "omero-figure"
omero_plugin_names:
# - "omero_webtagging_autotag"
- "omero_iviewer"
- "omero_figure"
python_lib_path: "/usr/lib/python2.7/site-packages"
| ---
pip_dependencies:
- reportlab
- markdown
pip_plugins:
# - "omero-webtagging-autotag"
- "omero-iviewer"
- "omero-figure==3.2.1"
omero_plugin_names:
# - "omero_webtagging_autotag"
- "omero_iviewer"
- "omero_figure"
python_lib_path: "/usr/lib/python2.7/site-packages"
| Install older version of omero-figure (newer on e did not work with server v5.4.9) | Install older version of omero-figure (newer on e did not work with server v5.4.9)
| YAML | mit | JIC-CSB/omero-ansible | yaml | ## Code Before:
---
plugin_dependencies:
- python-reportlab
pip_plugins:
# - "omero-webtagging-autotag"
- "omero-iviewer"
- "omero-figure"
omero_plugin_names:
# - "omero_webtagging_autotag"
- "omero_iviewer"
- "omero_figure"
python_lib_path: "/usr/lib/python2.7/site-packages"
## Instruction:
Install older version of omero-figure (newer on e did not work with server v5.4.9)
## Code After:
---
pip_dependencies:
- reportlab
- markdown
pip_plugins:
# - "omero-webtagging-autotag"
- "omero-iviewer"
- "omero-figure==3.2.1"
omero_plugin_names:
# - "omero_webtagging_autotag"
- "omero_iviewer"
- "omero_figure"
python_lib_path: "/usr/lib/python2.7/site-packages"
| ---
- plugin_dependencies:
? --- ^
+ pip_dependencies:
? ^
- - python-reportlab
? -------
+ - reportlab
+ - markdown
pip_plugins:
# - "omero-webtagging-autotag"
- "omero-iviewer"
- - "omero-figure"
+ - "omero-figure==3.2.1"
? +++++++
omero_plugin_names:
# - "omero_webtagging_autotag"
- "omero_iviewer"
- "omero_figure"
python_lib_path: "/usr/lib/python2.7/site-packages" | 7 | 0.4375 | 4 | 3 |
3d17b501d8857842788f1925a7ef91ea7e8513e5 | assets/js/components/ErrorHandler/index.js | assets/js/components/ErrorHandler/index.js | /**
* External dependencies
*/
import PropTypes from 'prop-types';
/**
* WordPress dependencies
*/
import { Component } from '@wordpress/element';
import { __ } from '@wordpress/i18n';
/**
* Internal dependencies
*/
import Notification from 'GoogleComponents/notifications/notification';
class ErrorHandler extends Component {
constructor( props ) {
super( props );
this.state = {
error: null,
info: null,
};
}
componentDidCatch( error, info ) {
global.console.error( 'Caught an error:', error, info );
this.setState( { error, info } );
}
render() {
const { children } = this.props;
const { error, info } = this.state;
// If there is no caught error, render the children components normally.
if ( ! error ) {
return children;
}
return (
<Notification
id={ 'googlesitekit-error' }
key={ 'googlesitekit-error' }
title={ __( 'Site Kit encountered an error', 'google-site-kit' ) }
description={ <code>{ error.message }</code> }
dismiss={ '' }
isDismissable={ false }
format="small"
type="win-error"
>
<pre>{ info.componentStack }</pre>
</Notification>
);
}
}
ErrorHandler.defaultProps = {};
ErrorHandler.propTypes = {
/** @ignore */
children: PropTypes.node.isRequired,
};
export default ErrorHandler;
| /**
* External dependencies
*/
import PropTypes from 'prop-types';
/**
* WordPress dependencies
*/
import { Component } from '@wordpress/element';
import { __ } from '@wordpress/i18n';
/**
* Internal dependencies
*/
import Notification from 'GoogleComponents/notifications/notification';
class ErrorHandler extends Component {
constructor( props ) {
super( props );
this.state = {
error: null,
info: null,
};
}
componentDidCatch( error, info ) {
global.console.error( 'Caught an error:', error, info );
this.setState( { error, info } );
}
render() {
const { children } = this.props;
const { error, info } = this.state;
// If there is no caught error, render the children components normally.
if ( ! error ) {
return children;
}
return (
<Notification
id="googlesitekit-error"
title={ __( 'Site Kit encountered an error', 'google-site-kit' ) }
description={ <code>{ error.message }</code> }
isDismissable={ false }
format="small"
type="win-error"
>
<pre>{ info.componentStack }</pre>
</Notification>
);
}
}
ErrorHandler.defaultProps = {};
ErrorHandler.propTypes = {
/** @ignore */
children: PropTypes.node.isRequired,
};
export default ErrorHandler;
| Tidy up strings in ErrorHandler. | Tidy up strings in ErrorHandler.
| JavaScript | apache-2.0 | google/site-kit-wp,google/site-kit-wp,google/site-kit-wp,google/site-kit-wp | javascript | ## Code Before:
/**
* External dependencies
*/
import PropTypes from 'prop-types';
/**
* WordPress dependencies
*/
import { Component } from '@wordpress/element';
import { __ } from '@wordpress/i18n';
/**
* Internal dependencies
*/
import Notification from 'GoogleComponents/notifications/notification';
class ErrorHandler extends Component {
constructor( props ) {
super( props );
this.state = {
error: null,
info: null,
};
}
componentDidCatch( error, info ) {
global.console.error( 'Caught an error:', error, info );
this.setState( { error, info } );
}
render() {
const { children } = this.props;
const { error, info } = this.state;
// If there is no caught error, render the children components normally.
if ( ! error ) {
return children;
}
return (
<Notification
id={ 'googlesitekit-error' }
key={ 'googlesitekit-error' }
title={ __( 'Site Kit encountered an error', 'google-site-kit' ) }
description={ <code>{ error.message }</code> }
dismiss={ '' }
isDismissable={ false }
format="small"
type="win-error"
>
<pre>{ info.componentStack }</pre>
</Notification>
);
}
}
ErrorHandler.defaultProps = {};
ErrorHandler.propTypes = {
/** @ignore */
children: PropTypes.node.isRequired,
};
export default ErrorHandler;
## Instruction:
Tidy up strings in ErrorHandler.
## Code After:
/**
* External dependencies
*/
import PropTypes from 'prop-types';
/**
* WordPress dependencies
*/
import { Component } from '@wordpress/element';
import { __ } from '@wordpress/i18n';
/**
* Internal dependencies
*/
import Notification from 'GoogleComponents/notifications/notification';
class ErrorHandler extends Component {
constructor( props ) {
super( props );
this.state = {
error: null,
info: null,
};
}
componentDidCatch( error, info ) {
global.console.error( 'Caught an error:', error, info );
this.setState( { error, info } );
}
render() {
const { children } = this.props;
const { error, info } = this.state;
// If there is no caught error, render the children components normally.
if ( ! error ) {
return children;
}
return (
<Notification
id="googlesitekit-error"
title={ __( 'Site Kit encountered an error', 'google-site-kit' ) }
description={ <code>{ error.message }</code> }
isDismissable={ false }
format="small"
type="win-error"
>
<pre>{ info.componentStack }</pre>
</Notification>
);
}
}
ErrorHandler.defaultProps = {};
ErrorHandler.propTypes = {
/** @ignore */
children: PropTypes.node.isRequired,
};
export default ErrorHandler;
| /**
* External dependencies
*/
import PropTypes from 'prop-types';
/**
* WordPress dependencies
*/
import { Component } from '@wordpress/element';
import { __ } from '@wordpress/i18n';
/**
* Internal dependencies
*/
import Notification from 'GoogleComponents/notifications/notification';
class ErrorHandler extends Component {
constructor( props ) {
super( props );
this.state = {
error: null,
info: null,
};
}
componentDidCatch( error, info ) {
global.console.error( 'Caught an error:', error, info );
this.setState( { error, info } );
}
render() {
const { children } = this.props;
const { error, info } = this.state;
// If there is no caught error, render the children components normally.
if ( ! error ) {
return children;
}
return (
<Notification
- id={ 'googlesitekit-error' }
? ^^^ ^^^
+ id="googlesitekit-error"
? ^ ^
- key={ 'googlesitekit-error' }
title={ __( 'Site Kit encountered an error', 'google-site-kit' ) }
description={ <code>{ error.message }</code> }
- dismiss={ '' }
isDismissable={ false }
format="small"
type="win-error"
>
<pre>{ info.componentStack }</pre>
</Notification>
);
}
}
ErrorHandler.defaultProps = {};
ErrorHandler.propTypes = {
/** @ignore */
children: PropTypes.node.isRequired,
};
export default ErrorHandler; | 4 | 0.060606 | 1 | 3 |
9483db85d24215a584dcca862b023702a2a9902d | package.json | package.json | {
"name": "win-clipboard",
"version": "0.0.3",
"description": "Clipboard access for Windows.",
"homepage": "",
"author": {
"name": "Marek Lewandowski",
"email": "code@mlewandowski.com",
"url": ""
},
"files": [
"lib"
],
"main": "lib/index.js",
"keywords": [
"addon"
],
"devDependencies": {
"chai": "^3.5.0",
"clipboardy": "^1.1.1",
"eslint": "^3.1.1",
"eslint-config-mlewand-node": "^0.1.0",
"gulp": "^3.9.0",
"gulp-eslint": "^3.0.1",
"gulp-exclude-gitignore": "^1.0.0",
"gulp-istanbul": "^1.0.0",
"gulp-line-ending-corrector": "^1.0.1",
"gulp-mocha": "^3.0.1",
"gulp-nsp": "^2.1.0",
"gulp-plumber": "^1.0.0"
},
"repository": "mlewand/win-clipboard",
"scripts": {
"prepublish": "gulp prepublish",
"test": "gulp"
},
"license": "MIT",
"dependencies": {
"bindings": "^1.2.1",
"detect-newline": "^2.1.0",
"iconv-lite": "^0.4.16",
"nan": "^2.6.2"
}
}
| {
"name": "win-clipboard",
"version": "0.0.3",
"description": "Clipboard access for Windows.",
"homepage": "",
"author": {
"name": "Marek Lewandowski",
"email": "code@mlewandowski.com",
"url": ""
},
"main": "lib/index.js",
"keywords": [
"addon"
],
"devDependencies": {
"chai": "^3.5.0",
"clipboardy": "^1.1.1",
"eslint": "^3.1.1",
"eslint-config-mlewand-node": "^0.1.0",
"gulp": "^3.9.0",
"gulp-eslint": "^3.0.1",
"gulp-exclude-gitignore": "^1.0.0",
"gulp-istanbul": "^1.0.0",
"gulp-line-ending-corrector": "^1.0.1",
"gulp-mocha": "^3.0.1",
"gulp-nsp": "^2.1.0",
"gulp-plumber": "^1.0.0"
},
"repository": "mlewand/win-clipboard",
"scripts": {
"prepublish": "gulp prepublish",
"test": "gulp"
},
"license": "MIT",
"dependencies": {
"bindings": "^1.2.1",
"detect-newline": "^2.1.0",
"iconv-lite": "^0.4.16",
"nan": "^2.6.2"
}
}
| Remove explicit files array, what caused binding gyp not to be included. | Remove explicit files array, what caused binding gyp not to be included.
| JSON | mit | mlewand/win-clipboard,mlewand/win-clipboard,mlewand/win-clipboard | json | ## Code Before:
{
"name": "win-clipboard",
"version": "0.0.3",
"description": "Clipboard access for Windows.",
"homepage": "",
"author": {
"name": "Marek Lewandowski",
"email": "code@mlewandowski.com",
"url": ""
},
"files": [
"lib"
],
"main": "lib/index.js",
"keywords": [
"addon"
],
"devDependencies": {
"chai": "^3.5.0",
"clipboardy": "^1.1.1",
"eslint": "^3.1.1",
"eslint-config-mlewand-node": "^0.1.0",
"gulp": "^3.9.0",
"gulp-eslint": "^3.0.1",
"gulp-exclude-gitignore": "^1.0.0",
"gulp-istanbul": "^1.0.0",
"gulp-line-ending-corrector": "^1.0.1",
"gulp-mocha": "^3.0.1",
"gulp-nsp": "^2.1.0",
"gulp-plumber": "^1.0.0"
},
"repository": "mlewand/win-clipboard",
"scripts": {
"prepublish": "gulp prepublish",
"test": "gulp"
},
"license": "MIT",
"dependencies": {
"bindings": "^1.2.1",
"detect-newline": "^2.1.0",
"iconv-lite": "^0.4.16",
"nan": "^2.6.2"
}
}
## Instruction:
Remove explicit files array, what caused binding gyp not to be included.
## Code After:
{
"name": "win-clipboard",
"version": "0.0.3",
"description": "Clipboard access for Windows.",
"homepage": "",
"author": {
"name": "Marek Lewandowski",
"email": "code@mlewandowski.com",
"url": ""
},
"main": "lib/index.js",
"keywords": [
"addon"
],
"devDependencies": {
"chai": "^3.5.0",
"clipboardy": "^1.1.1",
"eslint": "^3.1.1",
"eslint-config-mlewand-node": "^0.1.0",
"gulp": "^3.9.0",
"gulp-eslint": "^3.0.1",
"gulp-exclude-gitignore": "^1.0.0",
"gulp-istanbul": "^1.0.0",
"gulp-line-ending-corrector": "^1.0.1",
"gulp-mocha": "^3.0.1",
"gulp-nsp": "^2.1.0",
"gulp-plumber": "^1.0.0"
},
"repository": "mlewand/win-clipboard",
"scripts": {
"prepublish": "gulp prepublish",
"test": "gulp"
},
"license": "MIT",
"dependencies": {
"bindings": "^1.2.1",
"detect-newline": "^2.1.0",
"iconv-lite": "^0.4.16",
"nan": "^2.6.2"
}
}
| {
"name": "win-clipboard",
"version": "0.0.3",
"description": "Clipboard access for Windows.",
"homepage": "",
"author": {
"name": "Marek Lewandowski",
"email": "code@mlewandowski.com",
"url": ""
},
- "files": [
- "lib"
- ],
"main": "lib/index.js",
"keywords": [
"addon"
],
"devDependencies": {
"chai": "^3.5.0",
"clipboardy": "^1.1.1",
"eslint": "^3.1.1",
"eslint-config-mlewand-node": "^0.1.0",
"gulp": "^3.9.0",
"gulp-eslint": "^3.0.1",
"gulp-exclude-gitignore": "^1.0.0",
"gulp-istanbul": "^1.0.0",
"gulp-line-ending-corrector": "^1.0.1",
"gulp-mocha": "^3.0.1",
"gulp-nsp": "^2.1.0",
"gulp-plumber": "^1.0.0"
},
"repository": "mlewand/win-clipboard",
"scripts": {
"prepublish": "gulp prepublish",
"test": "gulp"
},
"license": "MIT",
"dependencies": {
"bindings": "^1.2.1",
"detect-newline": "^2.1.0",
"iconv-lite": "^0.4.16",
"nan": "^2.6.2"
}
} | 3 | 0.068182 | 0 | 3 |
070280021d9091f7313e4312bb5da9be5cb947c2 | docs/content/documentation/getting-started/installation.md | docs/content/documentation/getting-started/installation.md | +++
title = "Installation"
weight = 1
+++
Gutenberg provides pre-built binaries for macOS, Linux and Windows on the
[GitHub release page](https://github.com/Keats/gutenberg/releases).
## macOS
Gutenberg is available on [Brew](https://brew.sh):
```bash
$ brew install gutenberg
```
## Windows
Gutenberg is available on [Scoop](http://scoop.sh):
```bash
$ scoop install gutenberg
```
## Arch Linux
Use your favourite AUR helper to install the `gutenberg-bin` package.
```bash
$ yaourt -S gutenberg-bin
```
## Void Linux
From the terminal, run the following command:
```bash
$ xbps-install -S gutenberg
```
## From source
To build it from source, you will need to have Git, [Rust and Cargo](https://www.rust-lang.org/)
installed.
From a terminal, you can now run the following command:
```bash
$ cargo build --release
```
The binary will be available in the `target/release` folder.
| +++
title = "Installation"
weight = 1
+++
Gutenberg provides pre-built binaries for macOS, Linux and Windows on the
[GitHub release page](https://github.com/Keats/gutenberg/releases).
## macOS
Gutenberg is available on [Brew](https://brew.sh):
```bash
$ brew install gutenberg
```
## Windows
Gutenberg is available on [Scoop](http://scoop.sh):
```bash
$ scoop install gutenberg
```
And [Chocolatey](https://chocolatey.org/):
```bash
$ choco install gutenberg
```
## Arch Linux
Use your favourite AUR helper to install the `gutenberg-bin` package.
```bash
$ yaourt -S gutenberg-bin
```
## Void Linux
From the terminal, run the following command:
```bash
$ xbps-install -S gutenberg
```
## From source
To build it from source, you will need to have Git, [Rust and Cargo](https://www.rust-lang.org/)
installed.
From a terminal, you can now run the following command:
```bash
$ cargo build --release
```
The binary will be available in the `target/release` folder.
| Add info about chocolatey package | Add info about chocolatey package | Markdown | mit | Keats/gutenberg,Keats/gutenberg,Keats/gutenberg,Keats/gutenberg,Keats/gutenberg | markdown | ## Code Before:
+++
title = "Installation"
weight = 1
+++
Gutenberg provides pre-built binaries for macOS, Linux and Windows on the
[GitHub release page](https://github.com/Keats/gutenberg/releases).
## macOS
Gutenberg is available on [Brew](https://brew.sh):
```bash
$ brew install gutenberg
```
## Windows
Gutenberg is available on [Scoop](http://scoop.sh):
```bash
$ scoop install gutenberg
```
## Arch Linux
Use your favourite AUR helper to install the `gutenberg-bin` package.
```bash
$ yaourt -S gutenberg-bin
```
## Void Linux
From the terminal, run the following command:
```bash
$ xbps-install -S gutenberg
```
## From source
To build it from source, you will need to have Git, [Rust and Cargo](https://www.rust-lang.org/)
installed.
From a terminal, you can now run the following command:
```bash
$ cargo build --release
```
The binary will be available in the `target/release` folder.
## Instruction:
Add info about chocolatey package
## Code After:
+++
title = "Installation"
weight = 1
+++
Gutenberg provides pre-built binaries for macOS, Linux and Windows on the
[GitHub release page](https://github.com/Keats/gutenberg/releases).
## macOS
Gutenberg is available on [Brew](https://brew.sh):
```bash
$ brew install gutenberg
```
## Windows
Gutenberg is available on [Scoop](http://scoop.sh):
```bash
$ scoop install gutenberg
```
And [Chocolatey](https://chocolatey.org/):
```bash
$ choco install gutenberg
```
## Arch Linux
Use your favourite AUR helper to install the `gutenberg-bin` package.
```bash
$ yaourt -S gutenberg-bin
```
## Void Linux
From the terminal, run the following command:
```bash
$ xbps-install -S gutenberg
```
## From source
To build it from source, you will need to have Git, [Rust and Cargo](https://www.rust-lang.org/)
installed.
From a terminal, you can now run the following command:
```bash
$ cargo build --release
```
The binary will be available in the `target/release` folder.
| +++
title = "Installation"
weight = 1
+++
Gutenberg provides pre-built binaries for macOS, Linux and Windows on the
[GitHub release page](https://github.com/Keats/gutenberg/releases).
## macOS
Gutenberg is available on [Brew](https://brew.sh):
```bash
$ brew install gutenberg
```
## Windows
Gutenberg is available on [Scoop](http://scoop.sh):
```bash
$ scoop install gutenberg
+ ```
+
+ And [Chocolatey](https://chocolatey.org/):
+
+ ```bash
+ $ choco install gutenberg
```
## Arch Linux
Use your favourite AUR helper to install the `gutenberg-bin` package.
```bash
$ yaourt -S gutenberg-bin
```
## Void Linux
From the terminal, run the following command:
```bash
$ xbps-install -S gutenberg
```
## From source
To build it from source, you will need to have Git, [Rust and Cargo](https://www.rust-lang.org/)
installed.
From a terminal, you can now run the following command:
```bash
$ cargo build --release
```
The binary will be available in the `target/release` folder. | 6 | 0.117647 | 6 | 0 |
bad6d2418c5481d3a58f702ab0983299d68cea4e | README.md | README.md |
It helps you to generate a pattern library from UI components you develop for
your jekyll website.
## Install
1. Download the project zip file.
2. Copy/paste files (or directories if they don't exist yet).
## Front-matter variables
Here is a list of special variables recognised by the plugin.
### Pattern variable
**As for posts, add any other variables in the Front-matter and use them in
your layout.**
Generated variables :
- `id`: The pattern filename without the `.html` extension.
- `markup`: The pattern HTML markup.
- `markup_escaped`: The HTML escaped version of the markup. Its usefull for
`<code>` blocks.
The following variables are recognised but not required.
`section`: This variable points out a section which the pattern belongs to,
and add the pattern to the page sections list.
### Page variables
Generated variables :
- `page.patterns`: The list of patterns
- `page.sections`: The list of patterns ordered by sections. Work as native
posts categories.
- `page.title`: The page title specified in the LibraryPage constructor use.
## To do
- [ ] Review the Pattern.destination method.
- [ ] Add a better layout |
It helps you to generate a pattern library from UI components you develop for
your jekyll website.
## Install
1. Download the project zip file.
2. Copy/paste files (or directories if they don't exist yet).
## Front-matter variables
Here is a list of special variables recognised by the plugin.
### Pattern variable
**As for posts, add any other variables in the Front-matter and use them in
your layout.**
Generated variables :
`id`
: The pattern filename without the `.html` extension.
- `markup`: The pattern HTML markup.
- `markup_escaped`: The HTML escaped version of the markup. Its usefull for
`<code>` blocks.
The following variables are recognised but not required.
`section`: This variable points out a section which the pattern belongs to,
and add the pattern to the page sections list.
### Page variables
Generated variables :
- `page.patterns`: The list of patterns
- `page.sections`: The list of patterns ordered by sections. Work as native
posts categories.
- `page.title`: The page title specified in the LibraryPage constructor use.
## To do
- Review the Pattern.destination method.
- Add a better layout
| Test <dl> github markdown exists ? | Test <dl> github markdown exists ? | Markdown | mit | adrienchretien/jekyll-pattern-library | markdown | ## Code Before:
It helps you to generate a pattern library from UI components you develop for
your jekyll website.
## Install
1. Download the project zip file.
2. Copy/paste files (or directories if they don't exist yet).
## Front-matter variables
Here is a list of special variables recognised by the plugin.
### Pattern variable
**As for posts, add any other variables in the Front-matter and use them in
your layout.**
Generated variables :
- `id`: The pattern filename without the `.html` extension.
- `markup`: The pattern HTML markup.
- `markup_escaped`: The HTML escaped version of the markup. Its usefull for
`<code>` blocks.
The following variables are recognised but not required.
`section`: This variable points out a section which the pattern belongs to,
and add the pattern to the page sections list.
### Page variables
Generated variables :
- `page.patterns`: The list of patterns
- `page.sections`: The list of patterns ordered by sections. Work as native
posts categories.
- `page.title`: The page title specified in the LibraryPage constructor use.
## To do
- [ ] Review the Pattern.destination method.
- [ ] Add a better layout
## Instruction:
Test <dl> github markdown exists ?
## Code After:
It helps you to generate a pattern library from UI components you develop for
your jekyll website.
## Install
1. Download the project zip file.
2. Copy/paste files (or directories if they don't exist yet).
## Front-matter variables
Here is a list of special variables recognised by the plugin.
### Pattern variable
**As for posts, add any other variables in the Front-matter and use them in
your layout.**
Generated variables :
`id`
: The pattern filename without the `.html` extension.
- `markup`: The pattern HTML markup.
- `markup_escaped`: The HTML escaped version of the markup. Its usefull for
`<code>` blocks.
The following variables are recognised but not required.
`section`: This variable points out a section which the pattern belongs to,
and add the pattern to the page sections list.
### Page variables
Generated variables :
- `page.patterns`: The list of patterns
- `page.sections`: The list of patterns ordered by sections. Work as native
posts categories.
- `page.title`: The page title specified in the LibraryPage constructor use.
## To do
- Review the Pattern.destination method.
- Add a better layout
|
It helps you to generate a pattern library from UI components you develop for
your jekyll website.
## Install
1. Download the project zip file.
2. Copy/paste files (or directories if they don't exist yet).
## Front-matter variables
Here is a list of special variables recognised by the plugin.
### Pattern variable
**As for posts, add any other variables in the Front-matter and use them in
your layout.**
Generated variables :
+ `id`
- - `id`: The pattern filename without the `.html` extension.
? ^ ^^^^^
+ : The pattern filename without the `.html` extension.
? ^ ^
- `markup`: The pattern HTML markup.
- `markup_escaped`: The HTML escaped version of the markup. Its usefull for
`<code>` blocks.
The following variables are recognised but not required.
`section`: This variable points out a section which the pattern belongs to,
and add the pattern to the page sections list.
### Page variables
Generated variables :
- `page.patterns`: The list of patterns
- `page.sections`: The list of patterns ordered by sections. Work as native
posts categories.
- `page.title`: The page title specified in the LibraryPage constructor use.
## To do
- - [ ] Review the Pattern.destination method.
? ----
+ - Review the Pattern.destination method.
- - [ ] Add a better layout
? ----
+ - Add a better layout | 7 | 0.162791 | 4 | 3 |
7eb0fcb6f42ee97e35c5a1c4104a277bf12ad0d9 | server/operations.js | server/operations.js | // Dependencies
var CaptchaPng = require("captchapng");
// Sessions
var sessions = {};
// Captcha configuration
var serverConfig = {
width: 100,
height: 30
};
// Get configuration
M.emit("captcha.getConfig", function (c) {
serverConfig = c;
});
// Verify captcha
M.on("captcha.verify", function (link, answer, callback) {
var sid = link.session && link.session._sid;
callback(answer === sessions[sid]);
});
/**
* captcha
* Serves the captcha image
*
* @name captcha
* @function
* @param {Object} link Mono link object
* @return
*/
exports.captcha = function (link) {
var res = link.res;
// Generate number and store it in sessions cache
var number = parseInt(Math.random() * 9000 + 1000);
if (!link.session || !link.session._sid) {
sessions[link.session._sid] = number;
}
var cap = new CaptchaPng(serverConfig.width, serverConfig.height, number);
var img = cap.getBase64();
var imgBase64 = new Buffer(img, "base64");
res.writeHead(200, {
"Content-Type": "image/png"
});
res.end(imgBase64);
};
| // Dependencies
var CaptchaPng = require("captchapng");
// Sessions
var sessions = {};
// Captcha configuration
var serverConfig = {
width: 100,
height: 30
};
// Get configuration
M.emit("captcha.getConfig", function (c) {
serverConfig = c;
});
// Verify captcha
M.on("captcha.verify", function (link, answer, callback) {
var sid = link.session && link.session._sid;
callback(answer === sessions[sid]);
});
/**
* captcha
* Serves the captcha image
*
* @name captcha
* @function
* @param {Object} link Mono link object
* @return
*/
exports.captcha = function (link) {
var res = link.res;
// Generate number and store it in sessions cache
var number = parseInt(Math.random() * 9000 + 1000);
if (!link.session || !link.session._sid) {
sessions[link.session._sid] = number;
}
var cap = new CaptchaPng(serverConfig.width, serverConfig.height, number);
cap.color(0, 100, 0, 0);
cap.color(80, 80, 80, 255);
var img = cap.getBase64();
var imgBase64 = new Buffer(img, "base64");
res.writeHead(200, {
"Content-Type": "image/png"
});
res.end(imgBase64);
};
| Add background and text color | Add background and text color
| JavaScript | mit | jillix/captcha | javascript | ## Code Before:
// Dependencies
var CaptchaPng = require("captchapng");
// Sessions
var sessions = {};
// Captcha configuration
var serverConfig = {
width: 100,
height: 30
};
// Get configuration
M.emit("captcha.getConfig", function (c) {
serverConfig = c;
});
// Verify captcha
M.on("captcha.verify", function (link, answer, callback) {
var sid = link.session && link.session._sid;
callback(answer === sessions[sid]);
});
/**
* captcha
* Serves the captcha image
*
* @name captcha
* @function
* @param {Object} link Mono link object
* @return
*/
exports.captcha = function (link) {
var res = link.res;
// Generate number and store it in sessions cache
var number = parseInt(Math.random() * 9000 + 1000);
if (!link.session || !link.session._sid) {
sessions[link.session._sid] = number;
}
var cap = new CaptchaPng(serverConfig.width, serverConfig.height, number);
var img = cap.getBase64();
var imgBase64 = new Buffer(img, "base64");
res.writeHead(200, {
"Content-Type": "image/png"
});
res.end(imgBase64);
};
## Instruction:
Add background and text color
## Code After:
// Dependencies
var CaptchaPng = require("captchapng");
// Sessions
var sessions = {};
// Captcha configuration
var serverConfig = {
width: 100,
height: 30
};
// Get configuration
M.emit("captcha.getConfig", function (c) {
serverConfig = c;
});
// Verify captcha
M.on("captcha.verify", function (link, answer, callback) {
var sid = link.session && link.session._sid;
callback(answer === sessions[sid]);
});
/**
* captcha
* Serves the captcha image
*
* @name captcha
* @function
* @param {Object} link Mono link object
* @return
*/
exports.captcha = function (link) {
var res = link.res;
// Generate number and store it in sessions cache
var number = parseInt(Math.random() * 9000 + 1000);
if (!link.session || !link.session._sid) {
sessions[link.session._sid] = number;
}
var cap = new CaptchaPng(serverConfig.width, serverConfig.height, number);
cap.color(0, 100, 0, 0);
cap.color(80, 80, 80, 255);
var img = cap.getBase64();
var imgBase64 = new Buffer(img, "base64");
res.writeHead(200, {
"Content-Type": "image/png"
});
res.end(imgBase64);
};
| // Dependencies
var CaptchaPng = require("captchapng");
// Sessions
var sessions = {};
// Captcha configuration
var serverConfig = {
width: 100,
height: 30
};
// Get configuration
M.emit("captcha.getConfig", function (c) {
serverConfig = c;
});
// Verify captcha
M.on("captcha.verify", function (link, answer, callback) {
var sid = link.session && link.session._sid;
callback(answer === sessions[sid]);
});
/**
* captcha
* Serves the captcha image
*
* @name captcha
* @function
* @param {Object} link Mono link object
* @return
*/
exports.captcha = function (link) {
var res = link.res;
// Generate number and store it in sessions cache
var number = parseInt(Math.random() * 9000 + 1000);
if (!link.session || !link.session._sid) {
sessions[link.session._sid] = number;
}
var cap = new CaptchaPng(serverConfig.width, serverConfig.height, number);
+ cap.color(0, 100, 0, 0);
+ cap.color(80, 80, 80, 255);
+
var img = cap.getBase64();
var imgBase64 = new Buffer(img, "base64");
res.writeHead(200, {
"Content-Type": "image/png"
});
res.end(imgBase64);
}; | 3 | 0.058824 | 3 | 0 |
8215e4ed702c7217ced47833895b8818e5cc9c7e | src/test/java/at/ac/tuwien/inso/integration_tests/StudentRecommendedCoursesTests.java | src/test/java/at/ac/tuwien/inso/integration_tests/StudentRecommendedCoursesTests.java | package at.ac.tuwien.inso.integration_tests;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.transaction.annotation.Transactional;
@RunWith(SpringRunner.class)
@SpringBootTest
@AutoConfigureMockMvc
@ActiveProfiles("test")
@Transactional
public class StudentRecommendedCoursesTests {
@Test
public void studentRecommendedCoursesTest() {
//TODO
}
@Test
public void dismissCourseTest() {
//TODO
}
}
| package at.ac.tuwien.inso.integration_tests;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.http.MediaType;
import org.springframework.test.annotation.Rollback;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.transaction.annotation.Transactional;
import static org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.csrf;
import static org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.user;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.model;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.redirectedUrl;
@RunWith(SpringRunner.class)
@SpringBootTest
@AutoConfigureMockMvc
@ActiveProfiles("test")
@Transactional
public class StudentRecommendedCoursesTests extends AbstractCoursesTests{
@Test
public void studentRecommendedCoursesTest() throws Exception {
mockMvc.perform(
get("/student/recommended").with(user(studentUserAccount))
).andExpect(
model().attributeExists("recommendedCourses")
);
}
@Test
@Rollback
public void dismissCourseTest() throws Exception {
mockMvc.perform(
post("/student/recommended")
.with(user(studentUserAccount))
.with(csrf())
.contentType(MediaType.APPLICATION_FORM_URLENCODED_VALUE)
.content("dismissedId="+calculusWS2016.getId())
).andExpect(
result -> {
if (studentRepository.findByAccount(studentUserAccount).getDismissedCourses().size() <= 0) {
throw new Exception("Size of dismissed courses for student " + student + " is zero");
}
if (!studentRepository.findByAccount(studentUserAccount).getDismissedCourses().contains(calculusWS2016)) {
throw new Exception("Wrong dismissed course!");
}
}
).andExpect(
redirectedUrl("/student/recommended")
);
}
}
| Add tests for RecommendedCourses and Dismiss course | Add tests for RecommendedCourses and Dismiss course
| Java | mit | university-information-system/uis,university-information-system/uis,university-information-system/uis,university-information-system/uis | java | ## Code Before:
package at.ac.tuwien.inso.integration_tests;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.transaction.annotation.Transactional;
@RunWith(SpringRunner.class)
@SpringBootTest
@AutoConfigureMockMvc
@ActiveProfiles("test")
@Transactional
public class StudentRecommendedCoursesTests {
@Test
public void studentRecommendedCoursesTest() {
//TODO
}
@Test
public void dismissCourseTest() {
//TODO
}
}
## Instruction:
Add tests for RecommendedCourses and Dismiss course
## Code After:
package at.ac.tuwien.inso.integration_tests;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.http.MediaType;
import org.springframework.test.annotation.Rollback;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.transaction.annotation.Transactional;
import static org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.csrf;
import static org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.user;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.model;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.redirectedUrl;
@RunWith(SpringRunner.class)
@SpringBootTest
@AutoConfigureMockMvc
@ActiveProfiles("test")
@Transactional
public class StudentRecommendedCoursesTests extends AbstractCoursesTests{
@Test
public void studentRecommendedCoursesTest() throws Exception {
mockMvc.perform(
get("/student/recommended").with(user(studentUserAccount))
).andExpect(
model().attributeExists("recommendedCourses")
);
}
@Test
@Rollback
public void dismissCourseTest() throws Exception {
mockMvc.perform(
post("/student/recommended")
.with(user(studentUserAccount))
.with(csrf())
.contentType(MediaType.APPLICATION_FORM_URLENCODED_VALUE)
.content("dismissedId="+calculusWS2016.getId())
).andExpect(
result -> {
if (studentRepository.findByAccount(studentUserAccount).getDismissedCourses().size() <= 0) {
throw new Exception("Size of dismissed courses for student " + student + " is zero");
}
if (!studentRepository.findByAccount(studentUserAccount).getDismissedCourses().contains(calculusWS2016)) {
throw new Exception("Wrong dismissed course!");
}
}
).andExpect(
redirectedUrl("/student/recommended")
);
}
}
| package at.ac.tuwien.inso.integration_tests;
import org.junit.Test;
import org.junit.runner.RunWith;
+ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
import org.springframework.boot.test.context.SpringBootTest;
+ import org.springframework.http.MediaType;
+ import org.springframework.test.annotation.Rollback;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.context.junit4.SpringRunner;
+ import org.springframework.test.web.servlet.MockMvc;
import org.springframework.transaction.annotation.Transactional;
+
+ import static org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.csrf;
+ import static org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.user;
+ import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
+ import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
+ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.model;
+ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.redirectedUrl;
@RunWith(SpringRunner.class)
@SpringBootTest
@AutoConfigureMockMvc
@ActiveProfiles("test")
@Transactional
- public class StudentRecommendedCoursesTests {
+ public class StudentRecommendedCoursesTests extends AbstractCoursesTests{
? ++++++++++++++++++++++++++++
+
@Test
- public void studentRecommendedCoursesTest() {
+ public void studentRecommendedCoursesTest() throws Exception {
? +++++++++++++++++
- //TODO
+ mockMvc.perform(
+ get("/student/recommended").with(user(studentUserAccount))
+ ).andExpect(
+ model().attributeExists("recommendedCourses")
+ );
}
@Test
+ @Rollback
- public void dismissCourseTest() {
+ public void dismissCourseTest() throws Exception {
? +++++++++++++++++
- //TODO
+ mockMvc.perform(
+ post("/student/recommended")
+ .with(user(studentUserAccount))
+ .with(csrf())
+ .contentType(MediaType.APPLICATION_FORM_URLENCODED_VALUE)
+ .content("dismissedId="+calculusWS2016.getId())
+ ).andExpect(
+ result -> {
+ if (studentRepository.findByAccount(studentUserAccount).getDismissedCourses().size() <= 0) {
+ throw new Exception("Size of dismissed courses for student " + student + " is zero");
+ }
+ if (!studentRepository.findByAccount(studentUserAccount).getDismissedCourses().contains(calculusWS2016)) {
+ throw new Exception("Wrong dismissed course!");
+ }
+ }
+ ).andExpect(
+ redirectedUrl("/student/recommended")
+ );
}
} | 44 | 1.62963 | 39 | 5 |
1406e592996dbb13794248979b208cdd059b24f9 | src/CMakeLists.txt | src/CMakeLists.txt | include(ECMMarkNonGuiExecutable)
ecm_create_qm_loader(kdesignerplugin_QM_LOADER kdesignerplugin5_qt)
set(kgendesignerplugin_SRCS
kgendesignerplugin.cpp
${kdesignerplugin_QM_LOADER}
)
add_executable(kgendesignerplugin ${kgendesignerplugin_SRCS})
add_executable(KF5::kgendesignerplugin ALIAS kgendesignerplugin)
ecm_mark_nongui_executable(kgendesignerplugin)
target_link_libraries(kgendesignerplugin KF5::ConfigCore # KConfig + KConfigGroup
KF5::CoreAddons # KMacroExpander
)
install(TARGETS kgendesignerplugin EXPORT KF5DesignerPluginTargets ${KF5_INSTALL_TARGETS_DEFAULT_ARGS} )
| include(ECMMarkNonGuiExecutable)
add_executable(kgendesignerplugin)
add_executable(KF5::kgendesignerplugin ALIAS kgendesignerplugin)
ecm_create_qm_loader(kgendesignerplugin kdesignerplugin5_qt)
target_sources(kgendesignerplugin PRIVATE
kgendesignerplugin.cpp
)
ecm_mark_nongui_executable(kgendesignerplugin)
target_link_libraries(kgendesignerplugin KF5::ConfigCore # KConfig + KConfigGroup
KF5::CoreAddons # KMacroExpander
)
install(TARGETS kgendesignerplugin EXPORT KF5DesignerPluginTargets ${KF5_INSTALL_TARGETS_DEFAULT_ARGS} )
| Use more target-centric cmake code | Use more target-centric cmake code
NO_CHANGELOG
| Text | lgpl-2.1 | KDE/kdesignerplugin,KDE/kdesignerplugin | text | ## Code Before:
include(ECMMarkNonGuiExecutable)
ecm_create_qm_loader(kdesignerplugin_QM_LOADER kdesignerplugin5_qt)
set(kgendesignerplugin_SRCS
kgendesignerplugin.cpp
${kdesignerplugin_QM_LOADER}
)
add_executable(kgendesignerplugin ${kgendesignerplugin_SRCS})
add_executable(KF5::kgendesignerplugin ALIAS kgendesignerplugin)
ecm_mark_nongui_executable(kgendesignerplugin)
target_link_libraries(kgendesignerplugin KF5::ConfigCore # KConfig + KConfigGroup
KF5::CoreAddons # KMacroExpander
)
install(TARGETS kgendesignerplugin EXPORT KF5DesignerPluginTargets ${KF5_INSTALL_TARGETS_DEFAULT_ARGS} )
## Instruction:
Use more target-centric cmake code
NO_CHANGELOG
## Code After:
include(ECMMarkNonGuiExecutable)
add_executable(kgendesignerplugin)
add_executable(KF5::kgendesignerplugin ALIAS kgendesignerplugin)
ecm_create_qm_loader(kgendesignerplugin kdesignerplugin5_qt)
target_sources(kgendesignerplugin PRIVATE
kgendesignerplugin.cpp
)
ecm_mark_nongui_executable(kgendesignerplugin)
target_link_libraries(kgendesignerplugin KF5::ConfigCore # KConfig + KConfigGroup
KF5::CoreAddons # KMacroExpander
)
install(TARGETS kgendesignerplugin EXPORT KF5DesignerPluginTargets ${KF5_INSTALL_TARGETS_DEFAULT_ARGS} )
| include(ECMMarkNonGuiExecutable)
- ecm_create_qm_loader(kdesignerplugin_QM_LOADER kdesignerplugin5_qt)
+ add_executable(kgendesignerplugin)
+ add_executable(KF5::kgendesignerplugin ALIAS kgendesignerplugin)
- set(kgendesignerplugin_SRCS
+ ecm_create_qm_loader(kgendesignerplugin kdesignerplugin5_qt)
+
+ target_sources(kgendesignerplugin PRIVATE
kgendesignerplugin.cpp
- ${kdesignerplugin_QM_LOADER}
)
- add_executable(kgendesignerplugin ${kgendesignerplugin_SRCS})
- add_executable(KF5::kgendesignerplugin ALIAS kgendesignerplugin)
ecm_mark_nongui_executable(kgendesignerplugin)
target_link_libraries(kgendesignerplugin KF5::ConfigCore # KConfig + KConfigGroup
KF5::CoreAddons # KMacroExpander
)
install(TARGETS kgendesignerplugin EXPORT KF5DesignerPluginTargets ${KF5_INSTALL_TARGETS_DEFAULT_ARGS} ) | 10 | 0.588235 | 5 | 5 |
3a82364fa0ee0dc814d5ffd64a80aaffe7ed1bf5 | SingularityUI/app/collections/TasksActive.coffee | SingularityUI/app/collections/TasksActive.coffee | Tasks = require './Tasks'
Task = require '../models/Task'
class TasksActive extends Tasks
model: Task
url: "#{ env.SINGULARITY_BASE }/#{ constants.apiBase }/tasks/active"
parse: (tasks) ->
_.each tasks, (task, i) =>
task.JSONString = utils.stringJSON task
task.id = task.taskId.id
task.name = task.mesosTask.name
task.resources = @parseResources task
task.memoryHuman = if task.resources?.memoryMb? then "#{ task.resources.memoryMb }Mb" else ''
task.host = task.offer.hostname?.split('.')[0]
task.startedAt = task.taskId.startedAt
task.startedAtHuman = moment(task.taskId.startedAt).from()
task.rack = task.taskId.rackId
tasks[i] = task
app.allTasks[task.id] = task
tasks
parseResources: (task) ->
cpus: _.find(task.mesosTask.resources, (resource) -> resource.name is 'cpus')?.scalar?.value ? ''
memoryMb: _.find(task.mesosTask.resources, (resource) -> resource.name is 'mem')?.scalar?.value ? ''
comparator: 'startedAt'
module.exports = TasksActive | Tasks = require './Tasks'
Task = require '../models/Task'
class TasksActive extends Tasks
model: Task
url: "#{ env.SINGULARITY_BASE }/#{ constants.apiBase }/tasks/active"
parse: (tasks) ->
_.each tasks, (task, i) =>
task.JSONString = utils.stringJSON task
task.id = task.taskId.id
task.requestId = task.taskRequest?.request.id
task.name = task.mesosTask.name
task.resources = @parseResources task
task.memoryHuman = if task.resources?.memoryMb? then "#{ task.resources.memoryMb }Mb" else ''
task.host = task.offer.hostname?.split('.')[0]
task.startedAt = task.taskId.startedAt
task.startedAtHuman = moment(task.taskId.startedAt).from()
task.rack = task.taskId.rackId
tasks[i] = task
app.allTasks[task.id] = task
tasks
parseResources: (task) ->
cpus: _.find(task.mesosTask.resources, (resource) -> resource.name is 'cpus')?.scalar?.value ? ''
memoryMb: _.find(task.mesosTask.resources, (resource) -> resource.name is 'mem')?.scalar?.value ? ''
comparator: 'startedAt'
module.exports = TasksActive | Add requestId as a top-level attribute of an active task | Add requestId as a top-level attribute of an active task | CoffeeScript | apache-2.0 | andrhamm/Singularity,calebTomlinson/Singularity,andrhamm/Singularity,evertrue/Singularity,calebTomlinson/Singularity,hs-jenkins-bot/Singularity,tejasmanohar/Singularity,nvoron23/Singularity,hs-jenkins-bot/Singularity,mjball/Singularity,stevenschlansker/Singularity,HubSpot/Singularity,grepsr/Singularity,HubSpot/Singularity,acbellini/Singularity,nvoron23/Singularity,mjball/Singularity,mjball/Singularity,acbellini/Singularity,stevenschlansker/Singularity,grepsr/Singularity,HubSpot/Singularity,calebTomlinson/Singularity,HubSpot/Singularity,acbellini/Singularity,andrhamm/Singularity,grepsr/Singularity,andrhamm/Singularity,acbellini/Singularity,tejasmanohar/Singularity,grepsr/Singularity,evertrue/Singularity,mjball/Singularity,calebTomlinson/Singularity,calebTomlinson/Singularity,grepsr/Singularity,hs-jenkins-bot/Singularity,mjball/Singularity,tejasmanohar/Singularity,andrhamm/Singularity,nvoron23/Singularity,stevenschlansker/Singularity,stevenschlansker/Singularity,grepsr/Singularity,acbellini/Singularity,nvoron23/Singularity,hs-jenkins-bot/Singularity,stevenschlansker/Singularity,calebTomlinson/Singularity,nvoron23/Singularity,tejasmanohar/Singularity,acbellini/Singularity,evertrue/Singularity,evertrue/Singularity,HubSpot/Singularity,stevenschlansker/Singularity,nvoron23/Singularity,tejasmanohar/Singularity,evertrue/Singularity,hs-jenkins-bot/Singularity,tejasmanohar/Singularity,evertrue/Singularity | coffeescript | ## Code Before:
Tasks = require './Tasks'
Task = require '../models/Task'
class TasksActive extends Tasks
model: Task
url: "#{ env.SINGULARITY_BASE }/#{ constants.apiBase }/tasks/active"
parse: (tasks) ->
_.each tasks, (task, i) =>
task.JSONString = utils.stringJSON task
task.id = task.taskId.id
task.name = task.mesosTask.name
task.resources = @parseResources task
task.memoryHuman = if task.resources?.memoryMb? then "#{ task.resources.memoryMb }Mb" else ''
task.host = task.offer.hostname?.split('.')[0]
task.startedAt = task.taskId.startedAt
task.startedAtHuman = moment(task.taskId.startedAt).from()
task.rack = task.taskId.rackId
tasks[i] = task
app.allTasks[task.id] = task
tasks
parseResources: (task) ->
cpus: _.find(task.mesosTask.resources, (resource) -> resource.name is 'cpus')?.scalar?.value ? ''
memoryMb: _.find(task.mesosTask.resources, (resource) -> resource.name is 'mem')?.scalar?.value ? ''
comparator: 'startedAt'
module.exports = TasksActive
## Instruction:
Add requestId as a top-level attribute of an active task
## Code After:
Tasks = require './Tasks'
Task = require '../models/Task'
class TasksActive extends Tasks
model: Task
url: "#{ env.SINGULARITY_BASE }/#{ constants.apiBase }/tasks/active"
parse: (tasks) ->
_.each tasks, (task, i) =>
task.JSONString = utils.stringJSON task
task.id = task.taskId.id
task.requestId = task.taskRequest?.request.id
task.name = task.mesosTask.name
task.resources = @parseResources task
task.memoryHuman = if task.resources?.memoryMb? then "#{ task.resources.memoryMb }Mb" else ''
task.host = task.offer.hostname?.split('.')[0]
task.startedAt = task.taskId.startedAt
task.startedAtHuman = moment(task.taskId.startedAt).from()
task.rack = task.taskId.rackId
tasks[i] = task
app.allTasks[task.id] = task
tasks
parseResources: (task) ->
cpus: _.find(task.mesosTask.resources, (resource) -> resource.name is 'cpus')?.scalar?.value ? ''
memoryMb: _.find(task.mesosTask.resources, (resource) -> resource.name is 'mem')?.scalar?.value ? ''
comparator: 'startedAt'
module.exports = TasksActive | Tasks = require './Tasks'
Task = require '../models/Task'
class TasksActive extends Tasks
model: Task
url: "#{ env.SINGULARITY_BASE }/#{ constants.apiBase }/tasks/active"
parse: (tasks) ->
_.each tasks, (task, i) =>
task.JSONString = utils.stringJSON task
task.id = task.taskId.id
+ task.requestId = task.taskRequest?.request.id
task.name = task.mesosTask.name
task.resources = @parseResources task
task.memoryHuman = if task.resources?.memoryMb? then "#{ task.resources.memoryMb }Mb" else ''
task.host = task.offer.hostname?.split('.')[0]
task.startedAt = task.taskId.startedAt
task.startedAtHuman = moment(task.taskId.startedAt).from()
task.rack = task.taskId.rackId
tasks[i] = task
app.allTasks[task.id] = task
tasks
parseResources: (task) ->
cpus: _.find(task.mesosTask.resources, (resource) -> resource.name is 'cpus')?.scalar?.value ? ''
memoryMb: _.find(task.mesosTask.resources, (resource) -> resource.name is 'mem')?.scalar?.value ? ''
comparator: 'startedAt'
module.exports = TasksActive | 1 | 0.03125 | 1 | 0 |
f3e0cc4b5a778b04373773dabd27be8782b1af93 | cosmo_tester/test_suites/snapshots/conftest.py | cosmo_tester/test_suites/snapshots/conftest.py | import pytest
from cosmo_tester.framework.test_hosts import Hosts
from cosmo_tester.test_suites.snapshots import get_multi_tenant_versions_list
@pytest.fixture(scope='function', params=get_multi_tenant_versions_list())
def hosts(request, ssh_key, module_tmpdir, test_config, logger):
hosts = Hosts(
ssh_key, module_tmpdir,
test_config, logger, request,
number_of_instances=3,
)
hosts.instances[0].image_type = request.param
vm = hosts.instances[2]
vm.image_name = test_config.platform['centos_7_image']
vm.username = test_config['test_os_usernames']['centos_7']
hosts.create()
try:
yield hosts
finally:
hosts.destroy()
| import pytest
from cosmo_tester.framework.test_hosts import Hosts, get_image
from cosmo_tester.test_suites.snapshots import get_multi_tenant_versions_list
@pytest.fixture(scope='function', params=get_multi_tenant_versions_list())
def hosts(request, ssh_key, module_tmpdir, test_config, logger):
hosts = Hosts(
ssh_key, module_tmpdir,
test_config, logger, request,
number_of_instances=3,
)
hosts.instances[0] = get_image(request.param, test_config)
hosts.instances[1] = get_image('master', test_config)
hosts.instances[2] = get_image('centos', test_config)
vm = hosts.instances[2]
vm.image_name = test_config.platform['centos_7_image']
vm.username = test_config['test_os_usernames']['centos_7']
hosts.create()
try:
yield hosts
finally:
hosts.destroy()
| Use specified images for snapshot fixture | Use specified images for snapshot fixture
| Python | apache-2.0 | cloudify-cosmo/cloudify-system-tests,cloudify-cosmo/cloudify-system-tests | python | ## Code Before:
import pytest
from cosmo_tester.framework.test_hosts import Hosts
from cosmo_tester.test_suites.snapshots import get_multi_tenant_versions_list
@pytest.fixture(scope='function', params=get_multi_tenant_versions_list())
def hosts(request, ssh_key, module_tmpdir, test_config, logger):
hosts = Hosts(
ssh_key, module_tmpdir,
test_config, logger, request,
number_of_instances=3,
)
hosts.instances[0].image_type = request.param
vm = hosts.instances[2]
vm.image_name = test_config.platform['centos_7_image']
vm.username = test_config['test_os_usernames']['centos_7']
hosts.create()
try:
yield hosts
finally:
hosts.destroy()
## Instruction:
Use specified images for snapshot fixture
## Code After:
import pytest
from cosmo_tester.framework.test_hosts import Hosts, get_image
from cosmo_tester.test_suites.snapshots import get_multi_tenant_versions_list
@pytest.fixture(scope='function', params=get_multi_tenant_versions_list())
def hosts(request, ssh_key, module_tmpdir, test_config, logger):
hosts = Hosts(
ssh_key, module_tmpdir,
test_config, logger, request,
number_of_instances=3,
)
hosts.instances[0] = get_image(request.param, test_config)
hosts.instances[1] = get_image('master', test_config)
hosts.instances[2] = get_image('centos', test_config)
vm = hosts.instances[2]
vm.image_name = test_config.platform['centos_7_image']
vm.username = test_config['test_os_usernames']['centos_7']
hosts.create()
try:
yield hosts
finally:
hosts.destroy()
| import pytest
- from cosmo_tester.framework.test_hosts import Hosts
+ from cosmo_tester.framework.test_hosts import Hosts, get_image
? +++++++++++
from cosmo_tester.test_suites.snapshots import get_multi_tenant_versions_list
@pytest.fixture(scope='function', params=get_multi_tenant_versions_list())
def hosts(request, ssh_key, module_tmpdir, test_config, logger):
hosts = Hosts(
ssh_key, module_tmpdir,
test_config, logger, request,
number_of_instances=3,
)
- hosts.instances[0].image_type = request.param
+ hosts.instances[0] = get_image(request.param, test_config)
+ hosts.instances[1] = get_image('master', test_config)
+ hosts.instances[2] = get_image('centos', test_config)
vm = hosts.instances[2]
vm.image_name = test_config.platform['centos_7_image']
vm.username = test_config['test_os_usernames']['centos_7']
hosts.create()
try:
yield hosts
finally:
hosts.destroy() | 6 | 0.230769 | 4 | 2 |
fece0019a54534b56960a30785bb70edb5d205bf | example_base/forms.py | example_base/forms.py | from base.form_utils import RequiredFieldForm
from .models import Document
from base.form_utils import FileDropInput
class DocumentForm(RequiredFieldForm):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
for name in ('file', 'description'):
self.fields[name].widget.attrs.update(
{'class': 'pure-input-2-3'}
)
class Meta:
model = Document
fields = (
'file',
'description',
)
widgets = {'file': FileDropInput()}
| from django import forms
from base.form_utils import RequiredFieldForm, FileDropInput
from .models import Document
class DocumentForm(RequiredFieldForm):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
for name in ('file', 'description'):
self.fields[name].widget.attrs.update(
{'class': 'pure-input-2-3'}
)
class Meta:
model = Document
fields = (
'file',
'description',
)
# Not required RequiredFieldForm uses FileDropInput for FileField
# widgets = {'file': FileDropInput()}
# this is an example of how to use in a basic ModelForm
class BasicDocumentModelForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
for name in ('file', 'description'):
self.fields[name].widget.attrs.update(
{'class': 'pure-input-2-3'}
)
class Meta:
model = Document
fields = (
'file',
'description',
)
widgets = {'file': FileDropInput()}
# this is an example of how to use in a basic Form
class NonModelForm(forms.Form):
file = forms.FileField(widget=FileDropInput)
description = forms.CharField(max_length=200)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
for name in ('file', 'description'):
self.fields[name].widget.attrs.update(
{'class': 'pure-input-2-3'}
)
| Add examples of ways to use FileDropInput | Add examples of ways to use FileDropInput
| Python | apache-2.0 | pkimber/base,pkimber/base,pkimber/base,pkimber/base | python | ## Code Before:
from base.form_utils import RequiredFieldForm
from .models import Document
from base.form_utils import FileDropInput
class DocumentForm(RequiredFieldForm):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
for name in ('file', 'description'):
self.fields[name].widget.attrs.update(
{'class': 'pure-input-2-3'}
)
class Meta:
model = Document
fields = (
'file',
'description',
)
widgets = {'file': FileDropInput()}
## Instruction:
Add examples of ways to use FileDropInput
## Code After:
from django import forms
from base.form_utils import RequiredFieldForm, FileDropInput
from .models import Document
class DocumentForm(RequiredFieldForm):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
for name in ('file', 'description'):
self.fields[name].widget.attrs.update(
{'class': 'pure-input-2-3'}
)
class Meta:
model = Document
fields = (
'file',
'description',
)
# Not required RequiredFieldForm uses FileDropInput for FileField
# widgets = {'file': FileDropInput()}
# this is an example of how to use in a basic ModelForm
class BasicDocumentModelForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
for name in ('file', 'description'):
self.fields[name].widget.attrs.update(
{'class': 'pure-input-2-3'}
)
class Meta:
model = Document
fields = (
'file',
'description',
)
widgets = {'file': FileDropInput()}
# this is an example of how to use in a basic Form
class NonModelForm(forms.Form):
file = forms.FileField(widget=FileDropInput)
description = forms.CharField(max_length=200)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
for name in ('file', 'description'):
self.fields[name].widget.attrs.update(
{'class': 'pure-input-2-3'}
)
| + from django import forms
- from base.form_utils import RequiredFieldForm
+ from base.form_utils import RequiredFieldForm, FileDropInput
? +++++++++++++++
-
from .models import Document
- from base.form_utils import FileDropInput
class DocumentForm(RequiredFieldForm):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
for name in ('file', 'description'):
self.fields[name].widget.attrs.update(
{'class': 'pure-input-2-3'}
)
class Meta:
model = Document
fields = (
'file',
'description',
)
+
+ # Not required RequiredFieldForm uses FileDropInput for FileField
+ # widgets = {'file': FileDropInput()}
+
+
+ # this is an example of how to use in a basic ModelForm
+ class BasicDocumentModelForm(forms.ModelForm):
+
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ for name in ('file', 'description'):
+ self.fields[name].widget.attrs.update(
+ {'class': 'pure-input-2-3'}
+ )
+
+ class Meta:
+ model = Document
+ fields = (
+ 'file',
+ 'description',
+ )
widgets = {'file': FileDropInput()}
+
+
+ # this is an example of how to use in a basic Form
+ class NonModelForm(forms.Form):
+
+ file = forms.FileField(widget=FileDropInput)
+ description = forms.CharField(max_length=200)
+
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ for name in ('file', 'description'):
+ self.fields[name].widget.attrs.update(
+ {'class': 'pure-input-2-3'}
+ ) | 40 | 1.818182 | 37 | 3 |
8cab27e4b17d1c181b809bc777a7735a334babee | README.md | README.md | Simple YouTube playback bot for Discord
| Simple YouTube playback bot for Discord
## Usage
Run Lethe using the email and password as command line arguments:
```
$ node lethe.js email@example.com hunter2
```
Then, run commands over Discord using the bot's username mention as a prefix, for example:
```
@Bot init
```
Where "Bot" is your bot's username.
## Commands
`init`: Initializes Lethe and binds it to the channel this command was run in and the first voice channel it can find (usually General).
`destroy`: Destroys Lethe's binding. This stops the current playback and unbinds it from the text and voice channel.
The following commands will only work inside the text channel Lethe was bound to.
`yt [id]`: Queues a video from YouTube to be played. If this is the only song in the queue, start playback. `queue [id]` does the same thing.
`list`: Lists the videos on the queue.
`next`: Stops the current playback and skips to the next video in the queue.
| Document how Lethe is used | Document how Lethe is used
| Markdown | mit | meew0/Lethe | markdown | ## Code Before:
Simple YouTube playback bot for Discord
## Instruction:
Document how Lethe is used
## Code After:
Simple YouTube playback bot for Discord
## Usage
Run Lethe using the email and password as command line arguments:
```
$ node lethe.js email@example.com hunter2
```
Then, run commands over Discord using the bot's username mention as a prefix, for example:
```
@Bot init
```
Where "Bot" is your bot's username.
## Commands
`init`: Initializes Lethe and binds it to the channel this command was run in and the first voice channel it can find (usually General).
`destroy`: Destroys Lethe's binding. This stops the current playback and unbinds it from the text and voice channel.
The following commands will only work inside the text channel Lethe was bound to.
`yt [id]`: Queues a video from YouTube to be played. If this is the only song in the queue, start playback. `queue [id]` does the same thing.
`list`: Lists the videos on the queue.
`next`: Stops the current playback and skips to the next video in the queue.
| Simple YouTube playback bot for Discord
+
+ ## Usage
+ Run Lethe using the email and password as command line arguments:
+
+ ```
+ $ node lethe.js email@example.com hunter2
+ ```
+
+ Then, run commands over Discord using the bot's username mention as a prefix, for example:
+
+ ```
+ @Bot init
+ ```
+
+ Where "Bot" is your bot's username.
+
+ ## Commands
+
+ `init`: Initializes Lethe and binds it to the channel this command was run in and the first voice channel it can find (usually General).
+ `destroy`: Destroys Lethe's binding. This stops the current playback and unbinds it from the text and voice channel.
+
+ The following commands will only work inside the text channel Lethe was bound to.
+
+ `yt [id]`: Queues a video from YouTube to be played. If this is the only song in the queue, start playback. `queue [id]` does the same thing.
+ `list`: Lists the videos on the queue.
+ `next`: Stops the current playback and skips to the next video in the queue. | 26 | 26 | 26 | 0 |
523708359201aa3269c372a5a9f8eb32c365e237 | spec/views/home/index.html.slim_spec.rb | spec/views/home/index.html.slim_spec.rb | require 'rails_helper'
RSpec.describe "home/index.html.slim", type: :view do
include Devise::TestHelpers
let(:user) { FactoryGirl.create :user }
it 'contain our dashboard header' do
sign_in user
render
expect(rendered).to include('eligible for benefits-based remission')
end
end
| require 'rails_helper'
RSpec.describe "home/index.html.slim", type: :view do
include Devise::TestHelpers
let(:user) { FactoryGirl.create :user }
let(:manager) { FactoryGirl.create :manager }
let(:admin) { FactoryGirl.create :admin_user }
context 'public access' do
it 'shows a restriction message' do
render
expect(rendered).to have_xpath('//div', text: /This system is restricted/)
end
end
context 'user access' do
it 'displays guidance' do
sign_in user
render
expect(rendered).to have_xpath('//span[@class="bold"]', text: /Check benefits/)
expect(rendered).to have_xpath('//p', text: /eligible for benefits-based remission/)
end
end
context 'manager access' do
it 'displays a dwp checklist' do
sign_in manager
render
expect(rendered).to have_xpath('//h2', text: /Manager summary for/)
expect(rendered).to have_xpath('//th', text: 'Staff member')
end
end
context 'admin access' do
it 'displays graphs' do
sign_in admin
render
expect(rendered).to have_xpath('//h2', text: 'Total')
end
end
end
| Improve test coverage of index view | Improve test coverage of index view
| Ruby | mit | ministryofjustice/fr-staffapp,ministryofjustice/fr-staffapp,ministryofjustice/fr-staffapp,ministryofjustice/fr-staffapp | ruby | ## Code Before:
require 'rails_helper'
RSpec.describe "home/index.html.slim", type: :view do
include Devise::TestHelpers
let(:user) { FactoryGirl.create :user }
it 'contain our dashboard header' do
sign_in user
render
expect(rendered).to include('eligible for benefits-based remission')
end
end
## Instruction:
Improve test coverage of index view
## Code After:
require 'rails_helper'
RSpec.describe "home/index.html.slim", type: :view do
include Devise::TestHelpers
let(:user) { FactoryGirl.create :user }
let(:manager) { FactoryGirl.create :manager }
let(:admin) { FactoryGirl.create :admin_user }
context 'public access' do
it 'shows a restriction message' do
render
expect(rendered).to have_xpath('//div', text: /This system is restricted/)
end
end
context 'user access' do
it 'displays guidance' do
sign_in user
render
expect(rendered).to have_xpath('//span[@class="bold"]', text: /Check benefits/)
expect(rendered).to have_xpath('//p', text: /eligible for benefits-based remission/)
end
end
context 'manager access' do
it 'displays a dwp checklist' do
sign_in manager
render
expect(rendered).to have_xpath('//h2', text: /Manager summary for/)
expect(rendered).to have_xpath('//th', text: 'Staff member')
end
end
context 'admin access' do
it 'displays graphs' do
sign_in admin
render
expect(rendered).to have_xpath('//h2', text: 'Total')
end
end
end
| require 'rails_helper'
RSpec.describe "home/index.html.slim", type: :view do
include Devise::TestHelpers
- let(:user) { FactoryGirl.create :user }
? ----
+ let(:user) { FactoryGirl.create :user }
+ let(:manager) { FactoryGirl.create :manager }
+ let(:admin) { FactoryGirl.create :admin_user }
- it 'contain our dashboard header' do
+ context 'public access' do
+ it 'shows a restriction message' do
+ render
+ expect(rendered).to have_xpath('//div', text: /This system is restricted/)
+ end
+ end
+
+ context 'user access' do
+ it 'displays guidance' do
- sign_in user
+ sign_in user
? ++
- render
+ render
? ++
+ expect(rendered).to have_xpath('//span[@class="bold"]', text: /Check benefits/)
- expect(rendered).to include('eligible for benefits-based remission')
? ^^^^^^ ^
+ expect(rendered).to have_xpath('//p', text: /eligible for benefits-based remission/)
? ++ ^^^ ++++++ +++++++++++++ ^
+ end
+ end
+
+ context 'manager access' do
+ it 'displays a dwp checklist' do
+ sign_in manager
+ render
+ expect(rendered).to have_xpath('//h2', text: /Manager summary for/)
+ expect(rendered).to have_xpath('//th', text: 'Staff member')
+ end
+ end
+
+ context 'admin access' do
+ it 'displays graphs' do
+ sign_in admin
+ render
+ expect(rendered).to have_xpath('//h2', text: 'Total')
+ end
end
end | 39 | 2.785714 | 34 | 5 |
a16d87d92a562678235b3a9f10a1e1c92469fb3d | app/models/user.rb | app/models/user.rb | class User < ActiveRecord::Base
has_many :games
validates_presence_of :username, :email, :password_digest, unless: :guest?
validates_uniqueness_of :username, allow_blank: true
require 'bcrypt'
attr_reader :password
include ActiveModel::SecurePassword::InstanceMethodsOnActivation
def name
guest ? "Guest" : username
end
def self.new_guest
new { |u| u.guest = true }
end
def move_to(user)
games.update_all(user_id: user.id)
end
end
| class User < ActiveRecord::Base
has_many :games
validates_presence_of :username, :email, :password_digest, unless: :guest?
validates_uniqueness_of :username, allow_blank: true
require 'bcrypt'
attr_reader :password
include ActiveModel::SecurePassword::InstanceMethodsOnActivation
def name
guest ? "Guest" : username
end
def self.new_guest
new { |u| u.guest = true }
end
end
| Remove unnecessary move to method | Remove unnecessary move to method
| Ruby | mit | woolly-bear-caterpillars-2014/Tetroggleable,stcrestrada/Tetroggleable,woolly-bear-caterpillars-2014/Tetroggleable,stcrestrada/Tetroggleable | ruby | ## Code Before:
class User < ActiveRecord::Base
has_many :games
validates_presence_of :username, :email, :password_digest, unless: :guest?
validates_uniqueness_of :username, allow_blank: true
require 'bcrypt'
attr_reader :password
include ActiveModel::SecurePassword::InstanceMethodsOnActivation
def name
guest ? "Guest" : username
end
def self.new_guest
new { |u| u.guest = true }
end
def move_to(user)
games.update_all(user_id: user.id)
end
end
## Instruction:
Remove unnecessary move to method
## Code After:
class User < ActiveRecord::Base
has_many :games
validates_presence_of :username, :email, :password_digest, unless: :guest?
validates_uniqueness_of :username, allow_blank: true
require 'bcrypt'
attr_reader :password
include ActiveModel::SecurePassword::InstanceMethodsOnActivation
def name
guest ? "Guest" : username
end
def self.new_guest
new { |u| u.guest = true }
end
end
| class User < ActiveRecord::Base
has_many :games
validates_presence_of :username, :email, :password_digest, unless: :guest?
validates_uniqueness_of :username, allow_blank: true
require 'bcrypt'
attr_reader :password
include ActiveModel::SecurePassword::InstanceMethodsOnActivation
def name
guest ? "Guest" : username
end
def self.new_guest
new { |u| u.guest = true }
end
- def move_to(user)
- games.update_all(user_id: user.id)
- end
-
end | 4 | 0.166667 | 0 | 4 |
56958984221eb5e331e460df2f8962a8c9a1b840 | spec/rackups/status.ru | spec/rackups/status.ru | require 'sinatra'
class App < Sinatra::Base
get('/success') do
status 200
end
post('/404') do
status 404
end
post('/503') do
status 503
end
end
run App | require 'sinatra'
class App < Sinatra::Base
get('/v1/success') do
status 200
end
get('/v1/not_found') do
status 404
end
post('/v1/service_unavailable') do
status 503
end
end
run App | Add v1 prefixes for convenience. | Add v1 prefixes for convenience.
| Ruby | bsd-3-clause | librato/librato-metrics,winebarrel/librato-metrics | ruby | ## Code Before:
require 'sinatra'
class App < Sinatra::Base
get('/success') do
status 200
end
post('/404') do
status 404
end
post('/503') do
status 503
end
end
run App
## Instruction:
Add v1 prefixes for convenience.
## Code After:
require 'sinatra'
class App < Sinatra::Base
get('/v1/success') do
status 200
end
get('/v1/not_found') do
status 404
end
post('/v1/service_unavailable') do
status 503
end
end
run App | require 'sinatra'
class App < Sinatra::Base
- get('/success') do
+ get('/v1/success') do
? +++
status 200
end
- post('/404') do
+ get('/v1/not_found') do
status 404
end
- post('/503') do
+ post('/v1/service_unavailable') do
status 503
end
end
run App | 6 | 0.352941 | 3 | 3 |
3a2232f3a6882ac69c021cc16aa5b9b766532ad7 | README.rst | README.rst | DJ-Database-URL
~~~~~~~~~~~~~~
This simple Django utility allows you to utilize the
`12factor <http://www.12factor.net/backing-services>`_ inspired
``DATABASE_URL`` environment variable to configure your Django application.
Usage
-----
Configure your database in ``settings.py`` from ``DATABASE_URL``::
DATABASES['default'] = dj_database_url.config()
Parse an arbitrary Database URL::
DATABASES['default'] = dj_database_url.parse('postgres://...')
Supported databases
-------------------
Support currently exists for PostgreSQL, MySQL and SQLite.
SQLite connects to file based databases. The same URL format is used, omitting
the hostname, and using the "file" portion as the filename of the database.
This has the effect of four slashes being present for an absolute file path:
``sqlite:////full/path/to/your/database/file.sqlite``.
Installation
------------
Installation is simple too::
$ pip install dj-database-url
| DJ-Database-URL
~~~~~~~~~~~~~~
This simple Django utility allows you to utilize the
`12factor <http://www.12factor.net/backing-services>`_ inspired
``DATABASE_URL`` environment variable to configure your Django application.
Usage
-----
Configure your database in ``settings.py`` from ``DATABASE_URL``::
DATABASES = {'default': dj_database_url.config()}
Parse an arbitrary Database URL::
DATABASES = {'default': dj_database_url.parse('postgres://...')}
Supported databases
-------------------
Support currently exists for PostgreSQL, MySQL and SQLite.
SQLite connects to file based databases. The same URL format is used, omitting
the hostname, and using the "file" portion as the filename of the database.
This has the effect of four slashes being present for an absolute file path:
``sqlite:////full/path/to/your/database/file.sqlite``.
Installation
------------
Installation is simple too::
$ pip install dj-database-url
| Set the DATABASES dict rather than assigning to it. | Set the DATABASES dict rather than assigning to it.
Basically just to allow more copy-pasting from the README. I know it should be obvious but it was instinct to copy verbatim. | reStructuredText | bsd-2-clause | avorio/dj-database-url,julianwachholz/dj-config-url,kennethreitz/dj-database-url,f0r4y312/django-connection-url | restructuredtext | ## Code Before:
DJ-Database-URL
~~~~~~~~~~~~~~
This simple Django utility allows you to utilize the
`12factor <http://www.12factor.net/backing-services>`_ inspired
``DATABASE_URL`` environment variable to configure your Django application.
Usage
-----
Configure your database in ``settings.py`` from ``DATABASE_URL``::
DATABASES['default'] = dj_database_url.config()
Parse an arbitrary Database URL::
DATABASES['default'] = dj_database_url.parse('postgres://...')
Supported databases
-------------------
Support currently exists for PostgreSQL, MySQL and SQLite.
SQLite connects to file based databases. The same URL format is used, omitting
the hostname, and using the "file" portion as the filename of the database.
This has the effect of four slashes being present for an absolute file path:
``sqlite:////full/path/to/your/database/file.sqlite``.
Installation
------------
Installation is simple too::
$ pip install dj-database-url
## Instruction:
Set the DATABASES dict rather than assigning to it.
Basically just to allow more copy-pasting from the README. I know it should be obvious but it was instinct to copy verbatim.
## Code After:
DJ-Database-URL
~~~~~~~~~~~~~~
This simple Django utility allows you to utilize the
`12factor <http://www.12factor.net/backing-services>`_ inspired
``DATABASE_URL`` environment variable to configure your Django application.
Usage
-----
Configure your database in ``settings.py`` from ``DATABASE_URL``::
DATABASES = {'default': dj_database_url.config()}
Parse an arbitrary Database URL::
DATABASES = {'default': dj_database_url.parse('postgres://...')}
Supported databases
-------------------
Support currently exists for PostgreSQL, MySQL and SQLite.
SQLite connects to file based databases. The same URL format is used, omitting
the hostname, and using the "file" portion as the filename of the database.
This has the effect of four slashes being present for an absolute file path:
``sqlite:////full/path/to/your/database/file.sqlite``.
Installation
------------
Installation is simple too::
$ pip install dj-database-url
| DJ-Database-URL
~~~~~~~~~~~~~~
This simple Django utility allows you to utilize the
`12factor <http://www.12factor.net/backing-services>`_ inspired
``DATABASE_URL`` environment variable to configure your Django application.
Usage
-----
Configure your database in ``settings.py`` from ``DATABASE_URL``::
- DATABASES['default'] = dj_database_url.config()
? ^ ^^^
+ DATABASES = {'default': dj_database_url.config()}
? ^^^^ ^ +
Parse an arbitrary Database URL::
- DATABASES['default'] = dj_database_url.parse('postgres://...')
? ^ ^^^
+ DATABASES = {'default': dj_database_url.parse('postgres://...')}
? ^^^^ ^ +
Supported databases
-------------------
Support currently exists for PostgreSQL, MySQL and SQLite.
SQLite connects to file based databases. The same URL format is used, omitting
the hostname, and using the "file" portion as the filename of the database.
This has the effect of four slashes being present for an absolute file path:
``sqlite:////full/path/to/your/database/file.sqlite``.
Installation
------------
Installation is simple too::
$ pip install dj-database-url | 4 | 0.114286 | 2 | 2 |
0c4b7349954b0d245052bbfaa695a7c82b1097ce | upgrades/15.7-to-15.11-upgrade.toml | upgrades/15.7-to-15.11-upgrade.toml | Exclude = "target|.settings|.git|.svn|.idea"
[[Transformations]]
Filter = "pom.xml"
[[Transformations.Proc]]
Name = "ReplaceMavenDependency"
Params = [
"org.seedstack:seedstack-bom:15.7",
"org.seedstack:seed-bom:15.11-M1-SNAPSHOT"
]
| Exclude = "target|.settings|.git|.svn|.idea"
[[Transformations]]
Filter = "pom.xml"
[[Transformations.Proc]]
Name = "ReplaceMavenDependency"
Params = [
"org.seedstack:seedstack-bom:15.7",
"org.seedstack:seed-bom:15.11-M1-SNAPSHOT"
]
[[Transformations]]
Filter = "*.java"
[[Transformations.Proc]]
Name = "Replace"
Params = [
"org.seedstack.seed.core.api",
"org.seedstack.seed",
"org.seedstack.seed.core.api",
"org.seedstack.seed",
"org.seedstack.seed.cli.api",
"org.seedstack.seed.cli",
"org.seedstack.seed.crypto.api",
"org.seedstack.seed.crypto",
"org.seedstack.seed.el.api",
"org.seedstack.seed.el",
"org.seedstack.seed.metrics.api",
"org.seedstack.seed.metrics",
"org.seedstack.seed.rest.api",
"org.seedstack.seed.rest",
"org.seedstack.seed.security.api.annotations",
"org.seedstack.seed.security",
"org.seedstack.seed.security.api.exceptions",
"org.seedstack.seed.security",
"org.seedstack.seed.security.api",
"org.seedstack.seed.security",
"org.seedstack.seed.it.api",
"org.seedstack.seed.it",
"org.seedstack.seed.transaction.api",
"org.seedstack.seed.transaction",
"org.seedstack.seed.transaction.utils",
"org.seedstack.seed.transaction.spi",
"org.seedstack.seed.web.api",
"org.seedstack.seed.web",
"org.seedstack.seed.web.DelegateServletContextListenerImpl",
"org.seedstack.seed.web.listener.DelegateServletContextListenerImpl",
"org.seedstack.seed.web.SeedServletContextListener",
"org.seedstack.seed.web.listener.SeedServletContextListener",
]
| Add the seed package changes to the upgrade file | Add the seed package changes to the upgrade file
| TOML | mpl-2.0 | seedstack/distribution | toml | ## Code Before:
Exclude = "target|.settings|.git|.svn|.idea"
[[Transformations]]
Filter = "pom.xml"
[[Transformations.Proc]]
Name = "ReplaceMavenDependency"
Params = [
"org.seedstack:seedstack-bom:15.7",
"org.seedstack:seed-bom:15.11-M1-SNAPSHOT"
]
## Instruction:
Add the seed package changes to the upgrade file
## Code After:
Exclude = "target|.settings|.git|.svn|.idea"
[[Transformations]]
Filter = "pom.xml"
[[Transformations.Proc]]
Name = "ReplaceMavenDependency"
Params = [
"org.seedstack:seedstack-bom:15.7",
"org.seedstack:seed-bom:15.11-M1-SNAPSHOT"
]
[[Transformations]]
Filter = "*.java"
[[Transformations.Proc]]
Name = "Replace"
Params = [
"org.seedstack.seed.core.api",
"org.seedstack.seed",
"org.seedstack.seed.core.api",
"org.seedstack.seed",
"org.seedstack.seed.cli.api",
"org.seedstack.seed.cli",
"org.seedstack.seed.crypto.api",
"org.seedstack.seed.crypto",
"org.seedstack.seed.el.api",
"org.seedstack.seed.el",
"org.seedstack.seed.metrics.api",
"org.seedstack.seed.metrics",
"org.seedstack.seed.rest.api",
"org.seedstack.seed.rest",
"org.seedstack.seed.security.api.annotations",
"org.seedstack.seed.security",
"org.seedstack.seed.security.api.exceptions",
"org.seedstack.seed.security",
"org.seedstack.seed.security.api",
"org.seedstack.seed.security",
"org.seedstack.seed.it.api",
"org.seedstack.seed.it",
"org.seedstack.seed.transaction.api",
"org.seedstack.seed.transaction",
"org.seedstack.seed.transaction.utils",
"org.seedstack.seed.transaction.spi",
"org.seedstack.seed.web.api",
"org.seedstack.seed.web",
"org.seedstack.seed.web.DelegateServletContextListenerImpl",
"org.seedstack.seed.web.listener.DelegateServletContextListenerImpl",
"org.seedstack.seed.web.SeedServletContextListener",
"org.seedstack.seed.web.listener.SeedServletContextListener",
]
| Exclude = "target|.settings|.git|.svn|.idea"
[[Transformations]]
Filter = "pom.xml"
[[Transformations.Proc]]
Name = "ReplaceMavenDependency"
Params = [
"org.seedstack:seedstack-bom:15.7",
"org.seedstack:seed-bom:15.11-M1-SNAPSHOT"
]
+
+ [[Transformations]]
+ Filter = "*.java"
+
+ [[Transformations.Proc]]
+ Name = "Replace"
+ Params = [
+ "org.seedstack.seed.core.api",
+ "org.seedstack.seed",
+
+ "org.seedstack.seed.core.api",
+ "org.seedstack.seed",
+
+ "org.seedstack.seed.cli.api",
+ "org.seedstack.seed.cli",
+
+ "org.seedstack.seed.crypto.api",
+ "org.seedstack.seed.crypto",
+
+ "org.seedstack.seed.el.api",
+ "org.seedstack.seed.el",
+
+ "org.seedstack.seed.metrics.api",
+ "org.seedstack.seed.metrics",
+
+ "org.seedstack.seed.rest.api",
+ "org.seedstack.seed.rest",
+
+ "org.seedstack.seed.security.api.annotations",
+ "org.seedstack.seed.security",
+
+ "org.seedstack.seed.security.api.exceptions",
+ "org.seedstack.seed.security",
+
+ "org.seedstack.seed.security.api",
+ "org.seedstack.seed.security",
+
+ "org.seedstack.seed.it.api",
+ "org.seedstack.seed.it",
+
+ "org.seedstack.seed.transaction.api",
+ "org.seedstack.seed.transaction",
+
+ "org.seedstack.seed.transaction.utils",
+ "org.seedstack.seed.transaction.spi",
+
+ "org.seedstack.seed.web.api",
+ "org.seedstack.seed.web",
+
+ "org.seedstack.seed.web.DelegateServletContextListenerImpl",
+ "org.seedstack.seed.web.listener.DelegateServletContextListenerImpl",
+
+ "org.seedstack.seed.web.SeedServletContextListener",
+ "org.seedstack.seed.web.listener.SeedServletContextListener",
+ ] | 55 | 5 | 55 | 0 |
7d4cfcc95639834b8696131af9708d8edbba7d94 | README.md | README.md | Support for the blog posts at [tjaddison.com](http://tjaddison.com)
Contains various implementations of Azure functions (as scripts or class libraries) that allow an OMS alert to send a Slack notification.
Post Index:
- [Building better OMS alerts with function apps](http://tjaddison.com/2017/08/06/Building-better-OMS-alerts-with-function-apps.html)
- [Monitoring Disk, CPU, and memory with OMS](http://tjaddison.com/2017/08/21/Monitoring-disk-cpu-and-memory-with-OMS.html)
- [Migrating function app scripts with a class library](http://tjaddison.com/2017/08/21/Monitoring-disk-cpu-and-memory-with-OMS.html) | Support for the blog posts at [tjaddison.com](http://tjaddison.com)
Contains various implementations of Azure functions (as scripts or class libraries) that allow an OMS alert to send a Slack notification.
Post Index:
- [Building better OMS alerts with function apps](http://tjaddison.com/2017/08/06/Building-better-OMS-alerts-with-function-apps.html)
- [Monitoring Disk, CPU, and memory with OMS](http://tjaddison.com/2017/08/21/Monitoring-disk-cpu-and-memory-with-OMS.html)
- [Migrating function app scripts with a class library](http://tjaddison.com/2017/08/21/Monitoring-disk-cpu-and-memory-with-OMS.html)
- [Building an OMS metric alert to Slack bridge with Azure functions](http://tjaddison.com/2017/08/29/Building-an-OMS-metric-alert-to-Slack-bridge-with-Azure-functions) | Add link to generic function post | Add link to generic function post
| Markdown | mit | taddison/blog-oms-to-slack | markdown | ## Code Before:
Support for the blog posts at [tjaddison.com](http://tjaddison.com)
Contains various implementations of Azure functions (as scripts or class libraries) that allow an OMS alert to send a Slack notification.
Post Index:
- [Building better OMS alerts with function apps](http://tjaddison.com/2017/08/06/Building-better-OMS-alerts-with-function-apps.html)
- [Monitoring Disk, CPU, and memory with OMS](http://tjaddison.com/2017/08/21/Monitoring-disk-cpu-and-memory-with-OMS.html)
- [Migrating function app scripts with a class library](http://tjaddison.com/2017/08/21/Monitoring-disk-cpu-and-memory-with-OMS.html)
## Instruction:
Add link to generic function post
## Code After:
Support for the blog posts at [tjaddison.com](http://tjaddison.com)
Contains various implementations of Azure functions (as scripts or class libraries) that allow an OMS alert to send a Slack notification.
Post Index:
- [Building better OMS alerts with function apps](http://tjaddison.com/2017/08/06/Building-better-OMS-alerts-with-function-apps.html)
- [Monitoring Disk, CPU, and memory with OMS](http://tjaddison.com/2017/08/21/Monitoring-disk-cpu-and-memory-with-OMS.html)
- [Migrating function app scripts with a class library](http://tjaddison.com/2017/08/21/Monitoring-disk-cpu-and-memory-with-OMS.html)
- [Building an OMS metric alert to Slack bridge with Azure functions](http://tjaddison.com/2017/08/29/Building-an-OMS-metric-alert-to-Slack-bridge-with-Azure-functions) | Support for the blog posts at [tjaddison.com](http://tjaddison.com)
Contains various implementations of Azure functions (as scripts or class libraries) that allow an OMS alert to send a Slack notification.
Post Index:
- [Building better OMS alerts with function apps](http://tjaddison.com/2017/08/06/Building-better-OMS-alerts-with-function-apps.html)
- [Monitoring Disk, CPU, and memory with OMS](http://tjaddison.com/2017/08/21/Monitoring-disk-cpu-and-memory-with-OMS.html)
- [Migrating function app scripts with a class library](http://tjaddison.com/2017/08/21/Monitoring-disk-cpu-and-memory-with-OMS.html)
+ - [Building an OMS metric alert to Slack bridge with Azure functions](http://tjaddison.com/2017/08/29/Building-an-OMS-metric-alert-to-Slack-bridge-with-Azure-functions) | 1 | 0.125 | 1 | 0 |
37684ee2bbb7ed7954e2fe4921a48f5fe25fa30d | contrib/dcs_download.sh | contrib/dcs_download.sh |
set -u
standard_setup ()
{
# $1 = source URL
# $2 = untarred directory
curl $1 | tar zxv && cd $2 && python setup.py install --prefix=/var/tmp/website
cd $WEBSITE_DIR
}
install_django ()
{
curl http://mulletron.uwcs.co.uk/django.tar | tar xv
}
install_python ()
{
curl http://mulletron.uwcs.co.uk/python26.tar | tar xv
}
install_markdown ()
{
standard_setup http://pypi.python.org/packages/source/M/Markdown/markdown-1.7.tar.gz markdown-1.7
}
install_docutils ()
{
wget http://docutils.sourceforge.net/docutils-snapshot.tgz && tar zxvf docutils-snapshot.tgz && cd docutils && python setup.py install --prefix=/var/tmp/website
cd $WEBSITE_DIR
}
WEBSITE_DIR=/var/tmp/website
export PYTHONPATH=$WEBSITE_DIR/lib/python2.4/site-packages/:$PYTHONPATH
mkdir -p $WEBSITE_DIR
cd $WEBSITE_DIR
install_python
install_django
install_markdown
install_docutils
|
standard_setup ()
{
# $1 = source URL
# $2 = untarred directory
curl $1 | tar zxv && cd $2 && python setup.py install --prefix=/var/tmp/website
cd $WEBSITE_DIR
}
install_django ()
{
standard_setup http://media.djangoproject.com/releases/1.0/Django-1.0.tar.gz Django-1.0
}
install_python ()
{
curl http://mulletron.uwcs.co.uk/python26.tar | tar xv
}
install_markdown ()
{
standard_setup http://pypi.python.org/packages/source/M/Markdown/markdown-1.7.tar.gz markdown-1.7
}
install_docutils ()
{
wget http://docutils.sourceforge.net/docutils-snapshot.tgz && tar zxvf docutils-snapshot.tgz && cd docutils && python setup.py install --prefix=/var/tmp/website
cd $WEBSITE_DIR
}
WEBSITE_DIR=/var/tmp/website
export PYTHONPATH=$WEBSITE_DIR/lib/python2.4/site-packages/:$PYTHONPATH
mkdir -p $WEBSITE_DIR
cd $WEBSITE_DIR
install_python
install_django
install_markdown
install_docutils
| Switch to using Django from the Intertubes. (This makes upgrading in the future easier) | Switch to using Django from the Intertubes. (This makes upgrading in the future easier)
| Shell | agpl-3.0 | UWCS/uwcs-website,UWCS/uwcs-website,UWCS/uwcs-website,UWCS/uwcs-website | shell | ## Code Before:
set -u
standard_setup ()
{
# $1 = source URL
# $2 = untarred directory
curl $1 | tar zxv && cd $2 && python setup.py install --prefix=/var/tmp/website
cd $WEBSITE_DIR
}
install_django ()
{
curl http://mulletron.uwcs.co.uk/django.tar | tar xv
}
install_python ()
{
curl http://mulletron.uwcs.co.uk/python26.tar | tar xv
}
install_markdown ()
{
standard_setup http://pypi.python.org/packages/source/M/Markdown/markdown-1.7.tar.gz markdown-1.7
}
install_docutils ()
{
wget http://docutils.sourceforge.net/docutils-snapshot.tgz && tar zxvf docutils-snapshot.tgz && cd docutils && python setup.py install --prefix=/var/tmp/website
cd $WEBSITE_DIR
}
WEBSITE_DIR=/var/tmp/website
export PYTHONPATH=$WEBSITE_DIR/lib/python2.4/site-packages/:$PYTHONPATH
mkdir -p $WEBSITE_DIR
cd $WEBSITE_DIR
install_python
install_django
install_markdown
install_docutils
## Instruction:
Switch to using Django from the Intertubes. (This makes upgrading in the future easier)
## Code After:
standard_setup ()
{
# $1 = source URL
# $2 = untarred directory
curl $1 | tar zxv && cd $2 && python setup.py install --prefix=/var/tmp/website
cd $WEBSITE_DIR
}
install_django ()
{
standard_setup http://media.djangoproject.com/releases/1.0/Django-1.0.tar.gz Django-1.0
}
install_python ()
{
curl http://mulletron.uwcs.co.uk/python26.tar | tar xv
}
install_markdown ()
{
standard_setup http://pypi.python.org/packages/source/M/Markdown/markdown-1.7.tar.gz markdown-1.7
}
install_docutils ()
{
wget http://docutils.sourceforge.net/docutils-snapshot.tgz && tar zxvf docutils-snapshot.tgz && cd docutils && python setup.py install --prefix=/var/tmp/website
cd $WEBSITE_DIR
}
WEBSITE_DIR=/var/tmp/website
export PYTHONPATH=$WEBSITE_DIR/lib/python2.4/site-packages/:$PYTHONPATH
mkdir -p $WEBSITE_DIR
cd $WEBSITE_DIR
install_python
install_django
install_markdown
install_docutils
| -
- set -u
standard_setup ()
{
# $1 = source URL
# $2 = untarred directory
curl $1 | tar zxv && cd $2 && python setup.py install --prefix=/var/tmp/website
cd $WEBSITE_DIR
}
install_django ()
{
- curl http://mulletron.uwcs.co.uk/django.tar | tar xv
+ standard_setup http://media.djangoproject.com/releases/1.0/Django-1.0.tar.gz Django-1.0
}
install_python ()
{
curl http://mulletron.uwcs.co.uk/python26.tar | tar xv
}
install_markdown ()
{
standard_setup http://pypi.python.org/packages/source/M/Markdown/markdown-1.7.tar.gz markdown-1.7
}
install_docutils ()
{
wget http://docutils.sourceforge.net/docutils-snapshot.tgz && tar zxvf docutils-snapshot.tgz && cd docutils && python setup.py install --prefix=/var/tmp/website
cd $WEBSITE_DIR
}
WEBSITE_DIR=/var/tmp/website
export PYTHONPATH=$WEBSITE_DIR/lib/python2.4/site-packages/:$PYTHONPATH
mkdir -p $WEBSITE_DIR
cd $WEBSITE_DIR
install_python
install_django
install_markdown
install_docutils | 4 | 0.095238 | 1 | 3 |
4a015a2c4d0d8a2e9fbe267e10c8377d8980711f | src/components/AdditionalPropertiesEditor.jsx | src/components/AdditionalPropertiesEditor.jsx | import React from 'react'
import {connect} from 'react-redux'
import {matchSchema} from '../utilities'
import {INTERNAL_ID} from '../utilities/data'
import {subpath, last} from '../utilities/path'
import PropertyEditor from './PropertyEditor'
const AdditionalPropertiesEditor = ({schema, data, path, rootSchema}) => {
if (schema == null || schema === false || data == null) {
return null
}
return (
<div>
{Object.keys(data).filter(key => key !== INTERNAL_ID).map(key => {
const sub = subpath(path, key)
const title = last(sub)
return <PropertyEditor key={data[key].__id || sub} schema={matchSchema(schema.anyOf || [schema], data[key], rootSchema)} data={data[key]} title={title} path={sub} canEditKey={true} canRemove={true} />
})}
</div>
)
}
export default connect(({rootSchema}) => ({rootSchema}))(AdditionalPropertiesEditor)
| import React from 'react'
import {connect} from 'react-redux'
import {matchSchema} from '../utilities'
import {INTERNAL_ID} from '../utilities/data'
import {subpath, last} from '../utilities/path'
import PropertyEditor from './PropertyEditor'
const AdditionalPropertiesEditor = ({schema, data, path, rootSchema}) => {
if (schema == null || schema === false || data == null) {
return null
}
return (
<div>
{Object.keys(data).filter(key => key !== INTERNAL_ID).map(key => {
const sub = subpath(path, key)
const title = last(sub)
return <PropertyEditor key={data[key].__id || sub} schema={matchSchema(schema.anyOf || [schema], data[key], rootSchema)} data={data[key]} title={title} path={sub} canEditKey={data[key].__id} canRemove={true} />
})}
</div>
)
}
export default connect(({rootSchema}) => ({rootSchema}))(AdditionalPropertiesEditor)
| Make primitive property keys non-editable | Make primitive property keys non-editable
| JSX | mit | IngloriousCoderz/react-property-grid,IngloriousCoderz/react-property-grid | jsx | ## Code Before:
import React from 'react'
import {connect} from 'react-redux'
import {matchSchema} from '../utilities'
import {INTERNAL_ID} from '../utilities/data'
import {subpath, last} from '../utilities/path'
import PropertyEditor from './PropertyEditor'
const AdditionalPropertiesEditor = ({schema, data, path, rootSchema}) => {
if (schema == null || schema === false || data == null) {
return null
}
return (
<div>
{Object.keys(data).filter(key => key !== INTERNAL_ID).map(key => {
const sub = subpath(path, key)
const title = last(sub)
return <PropertyEditor key={data[key].__id || sub} schema={matchSchema(schema.anyOf || [schema], data[key], rootSchema)} data={data[key]} title={title} path={sub} canEditKey={true} canRemove={true} />
})}
</div>
)
}
export default connect(({rootSchema}) => ({rootSchema}))(AdditionalPropertiesEditor)
## Instruction:
Make primitive property keys non-editable
## Code After:
import React from 'react'
import {connect} from 'react-redux'
import {matchSchema} from '../utilities'
import {INTERNAL_ID} from '../utilities/data'
import {subpath, last} from '../utilities/path'
import PropertyEditor from './PropertyEditor'
const AdditionalPropertiesEditor = ({schema, data, path, rootSchema}) => {
if (schema == null || schema === false || data == null) {
return null
}
return (
<div>
{Object.keys(data).filter(key => key !== INTERNAL_ID).map(key => {
const sub = subpath(path, key)
const title = last(sub)
return <PropertyEditor key={data[key].__id || sub} schema={matchSchema(schema.anyOf || [schema], data[key], rootSchema)} data={data[key]} title={title} path={sub} canEditKey={data[key].__id} canRemove={true} />
})}
</div>
)
}
export default connect(({rootSchema}) => ({rootSchema}))(AdditionalPropertiesEditor)
| import React from 'react'
import {connect} from 'react-redux'
import {matchSchema} from '../utilities'
import {INTERNAL_ID} from '../utilities/data'
import {subpath, last} from '../utilities/path'
import PropertyEditor from './PropertyEditor'
const AdditionalPropertiesEditor = ({schema, data, path, rootSchema}) => {
if (schema == null || schema === false || data == null) {
return null
}
return (
<div>
{Object.keys(data).filter(key => key !== INTERNAL_ID).map(key => {
const sub = subpath(path, key)
const title = last(sub)
- return <PropertyEditor key={data[key].__id || sub} schema={matchSchema(schema.anyOf || [schema], data[key], rootSchema)} data={data[key]} title={title} path={sub} canEditKey={true} canRemove={true} />
? ^^^^
+ return <PropertyEditor key={data[key].__id || sub} schema={matchSchema(schema.anyOf || [schema], data[key], rootSchema)} data={data[key]} title={title} path={sub} canEditKey={data[key].__id} canRemove={true} />
? ^^^^^^^^^^^^^^
})}
</div>
)
}
export default connect(({rootSchema}) => ({rootSchema}))(AdditionalPropertiesEditor) | 2 | 0.08 | 1 | 1 |
27ef8368346e9416303006afe234d81e3c69dcae | soletta_module/samples/foosball/gtk_fuzzy.json | soletta_module/samples/foosball/gtk_fuzzy.json | {
"$schema": "http://solettaproject.github.io/soletta/schemas/config.schema",
"nodetypes": [
{
"name": "ButtonMain",
"type": "gtk/pushbutton"
},
{
"name": "ButtonNext",
"type": "gtk/pushbutton"
},
{
"name": "ButtonReset",
"type": "gtk/pushbutton"
},
{
"name": "Lcd",
"type": "gtk/label"
},
{
"name": "Led1",
"options": {
"rgb": "255|0|0"
},
"type": "gtk/led"
},
{
"name": "Led2",
"options": {
"rgb": "255|255|0"
},
"type": "gtk/led"
},
{
"name": "PushButton",
"type": "gtk/pushbutton"
},
{
"name": "Label",
"type": "gtk/label"
},
{
"name": "Led",
"type": "gtk/led"
},
{
"name": "SML",
"options": {
"stabilization_hits": 0,
"number_of_terms": "val:15"
},
"type": "machine-learning/fuzzy"
}
]
}
| {
"$schema": "http://solettaproject.github.io/soletta/schemas/config.schema",
"nodetypes": [
{
"name": "ButtonMain",
"type": "gtk/pushbutton"
},
{
"name": "ButtonNext",
"type": "gtk/pushbutton"
},
{
"name": "ButtonReset",
"type": "gtk/pushbutton"
},
{
"name": "Lcd",
"type": "gtk/label"
},
{
"name": "Led1",
"options": {
"rgb": {
"red": 255,
"green": 0,
"blue": 0
}
},
"type": "gtk/led"
},
{
"name": "Led2",
"options": {
"rgb": {
"red": 255,
"green": 255,
"blue": 0
}
},
"type": "gtk/led"
},
{
"name": "PushButton",
"type": "gtk/pushbutton"
},
{
"name": "Label",
"type": "gtk/label"
},
{
"name": "Led",
"type": "gtk/led"
},
{
"name": "SML",
"options": {
"stabilization_hits": 0,
"number_of_terms": "val:15"
},
"type": "machine-learning/fuzzy"
}
]
}
| Set default rgb parameter correctly in gtk conf file | [soletta_module] Set default rgb parameter correctly in gtk conf file
Soletta gtk/led has changed and rgb option is no longer a string. Now it
is a regular rgb field
Signed-off-by: Otavio Pontes <a04f37e3ccb281cf4119e4b2fa8286ed94121d92@intel.com>
| JSON | apache-2.0 | solettaproject/soletta-machine-learning,anselmolsm/soletta-machine-learning,solettaproject/soletta-machine-learning,anselmolsm/soletta-machine-learning,anselmolsm/soletta-machine-learning,solettaproject/soletta-machine-learning | json | ## Code Before:
{
"$schema": "http://solettaproject.github.io/soletta/schemas/config.schema",
"nodetypes": [
{
"name": "ButtonMain",
"type": "gtk/pushbutton"
},
{
"name": "ButtonNext",
"type": "gtk/pushbutton"
},
{
"name": "ButtonReset",
"type": "gtk/pushbutton"
},
{
"name": "Lcd",
"type": "gtk/label"
},
{
"name": "Led1",
"options": {
"rgb": "255|0|0"
},
"type": "gtk/led"
},
{
"name": "Led2",
"options": {
"rgb": "255|255|0"
},
"type": "gtk/led"
},
{
"name": "PushButton",
"type": "gtk/pushbutton"
},
{
"name": "Label",
"type": "gtk/label"
},
{
"name": "Led",
"type": "gtk/led"
},
{
"name": "SML",
"options": {
"stabilization_hits": 0,
"number_of_terms": "val:15"
},
"type": "machine-learning/fuzzy"
}
]
}
## Instruction:
[soletta_module] Set default rgb parameter correctly in gtk conf file
Soletta gtk/led has changed and rgb option is no longer a string. Now it
is a regular rgb field
Signed-off-by: Otavio Pontes <a04f37e3ccb281cf4119e4b2fa8286ed94121d92@intel.com>
## Code After:
{
"$schema": "http://solettaproject.github.io/soletta/schemas/config.schema",
"nodetypes": [
{
"name": "ButtonMain",
"type": "gtk/pushbutton"
},
{
"name": "ButtonNext",
"type": "gtk/pushbutton"
},
{
"name": "ButtonReset",
"type": "gtk/pushbutton"
},
{
"name": "Lcd",
"type": "gtk/label"
},
{
"name": "Led1",
"options": {
"rgb": {
"red": 255,
"green": 0,
"blue": 0
}
},
"type": "gtk/led"
},
{
"name": "Led2",
"options": {
"rgb": {
"red": 255,
"green": 255,
"blue": 0
}
},
"type": "gtk/led"
},
{
"name": "PushButton",
"type": "gtk/pushbutton"
},
{
"name": "Label",
"type": "gtk/label"
},
{
"name": "Led",
"type": "gtk/led"
},
{
"name": "SML",
"options": {
"stabilization_hits": 0,
"number_of_terms": "val:15"
},
"type": "machine-learning/fuzzy"
}
]
}
| {
"$schema": "http://solettaproject.github.io/soletta/schemas/config.schema",
"nodetypes": [
{
"name": "ButtonMain",
"type": "gtk/pushbutton"
},
{
"name": "ButtonNext",
"type": "gtk/pushbutton"
},
{
"name": "ButtonReset",
"type": "gtk/pushbutton"
},
{
"name": "Lcd",
"type": "gtk/label"
},
{
"name": "Led1",
"options": {
- "rgb": "255|0|0"
+ "rgb": {
+ "red": 255,
+ "green": 0,
+ "blue": 0
+ }
},
"type": "gtk/led"
},
{
"name": "Led2",
"options": {
- "rgb": "255|255|0"
+ "rgb": {
+ "red": 255,
+ "green": 255,
+ "blue": 0
+ }
},
"type": "gtk/led"
},
{
"name": "PushButton",
"type": "gtk/pushbutton"
},
{
"name": "Label",
"type": "gtk/label"
},
{
"name": "Led",
"type": "gtk/led"
},
{
"name": "SML",
"options": {
"stabilization_hits": 0,
"number_of_terms": "val:15"
},
"type": "machine-learning/fuzzy"
}
]
} | 12 | 0.218182 | 10 | 2 |
59dda27766ae7827f9f1795542079a4d8044f6cb | package.json | package.json | {
"name": "pump.io-client-app"
"version": "0.1.0-alpha.1",
"dependencies": {
"express": "2.5.x",
"utml": "0.2.x",
"underscore": "1.4.x",
"webfinger": "0.1.x",
"databank": "0.19.x",
"underscore": "1.5.x",
"dialback-client": ">=0.1.1",
"oauth": "0.9.x",
"async": "0.2.x",
"connect-databank": "0.12.x",
"node-uuid": "1.4.x",
"bunyan": "0.21.x"
}
}
| {
"name": "pump.io-client-app"
"version": "0.1.0-alpha.1",
"main": "./lib/app.js",
"dependencies": {
"express": "2.5.x",
"utml": "0.2.x",
"underscore": "1.4.x",
"webfinger": "0.1.x",
"databank": "0.19.x",
"underscore": "1.5.x",
"dialback-client": ">=0.1.1",
"oauth": "0.9.x",
"async": "0.2.x",
"connect-databank": "0.12.x",
"node-uuid": "1.4.x",
"bunyan": "0.21.x"
}
}
| Add lib/app as the main entrypoint | Add lib/app as the main entrypoint
| JSON | apache-2.0 | e14n/pump.io-client-app | json | ## Code Before:
{
"name": "pump.io-client-app"
"version": "0.1.0-alpha.1",
"dependencies": {
"express": "2.5.x",
"utml": "0.2.x",
"underscore": "1.4.x",
"webfinger": "0.1.x",
"databank": "0.19.x",
"underscore": "1.5.x",
"dialback-client": ">=0.1.1",
"oauth": "0.9.x",
"async": "0.2.x",
"connect-databank": "0.12.x",
"node-uuid": "1.4.x",
"bunyan": "0.21.x"
}
}
## Instruction:
Add lib/app as the main entrypoint
## Code After:
{
"name": "pump.io-client-app"
"version": "0.1.0-alpha.1",
"main": "./lib/app.js",
"dependencies": {
"express": "2.5.x",
"utml": "0.2.x",
"underscore": "1.4.x",
"webfinger": "0.1.x",
"databank": "0.19.x",
"underscore": "1.5.x",
"dialback-client": ">=0.1.1",
"oauth": "0.9.x",
"async": "0.2.x",
"connect-databank": "0.12.x",
"node-uuid": "1.4.x",
"bunyan": "0.21.x"
}
}
| {
"name": "pump.io-client-app"
"version": "0.1.0-alpha.1",
+ "main": "./lib/app.js",
"dependencies": {
"express": "2.5.x",
"utml": "0.2.x",
"underscore": "1.4.x",
"webfinger": "0.1.x",
"databank": "0.19.x",
"underscore": "1.5.x",
"dialback-client": ">=0.1.1",
"oauth": "0.9.x",
"async": "0.2.x",
"connect-databank": "0.12.x",
"node-uuid": "1.4.x",
"bunyan": "0.21.x"
}
} | 1 | 0.055556 | 1 | 0 |
03f918fca00d2a13effea96ecd4ba7f39959b46c | compute/api/runTests.ps1 | compute/api/runTests.ps1 |
dotnet restore --force
dotnet test --no-restore --test-adapter-path:. --logger:junit 2>&1 | %{ "$_" } | import-module -DisableNameChecking ..\..\..\BuildTools.psm1
Set-TestTimeout 600
dotnet restore --force
dotnet test --no-restore --test-adapter-path:. --logger:junit 2>&1 | %{ "$_" } | Increase the overall timeout for the full test run. - If several instances of tests are running in parallel, the usage export bucket tests will need to retry some, and that will consume time. | tests(Compute): Increase the overall timeout for the full test run.
- If several instances of tests are running in parallel, the usage export bucket tests will need to retry some, and that will consume time.
| PowerShell | apache-2.0 | GoogleCloudPlatform/dotnet-docs-samples,GoogleCloudPlatform/dotnet-docs-samples,GoogleCloudPlatform/dotnet-docs-samples,GoogleCloudPlatform/dotnet-docs-samples | powershell | ## Code Before:
dotnet restore --force
dotnet test --no-restore --test-adapter-path:. --logger:junit 2>&1 | %{ "$_" }
## Instruction:
tests(Compute): Increase the overall timeout for the full test run.
- If several instances of tests are running in parallel, the usage export bucket tests will need to retry some, and that will consume time.
## Code After:
import-module -DisableNameChecking ..\..\..\BuildTools.psm1
Set-TestTimeout 600
dotnet restore --force
dotnet test --no-restore --test-adapter-path:. --logger:junit 2>&1 | %{ "$_" } | + import-module -DisableNameChecking ..\..\..\BuildTools.psm1
+
+ Set-TestTimeout 600
dotnet restore --force
dotnet test --no-restore --test-adapter-path:. --logger:junit 2>&1 | %{ "$_" } | 3 | 1 | 3 | 0 |
0021ff0d810452a9cef69ac6d6c0bc0125119928 | app/controllers/admin/content/translations.js | app/controllers/admin/content/translations.js | import Controller from '@ember/controller';
export default Controller.extend({
isLoading: false,
actions: {
translationsDownload() {
this.set('isLoading', true);
this.get('loader')
.downloadFile('/admin/content/translations/all')
.then(() => {
this.get('notify').success(this.get('l10n').t('Translations Zip generated successfully.'));
})
.catch(e => {
console.warn(e);
this.get('notify').error(this.get('l10n').t('Unexpected error occurred.'));
})
.finally(() => {
this.set('isLoading', false);
});
}
}
});
| import Controller from '@ember/controller';
export default Controller.extend({
isLoading: false,
actions: {
translationsDownload() {
this.set('isLoading', true);
this.get('loader')
.downloadFile('/admin/content/translations/all')
.then(res => {
const anchor = document.createElement('a');
anchor.style.display = 'none';
anchor.href = `data:text/plain;charset=utf-8,${encodeURIComponent(res)}`;
anchor.download = 'Translations.zip';
anchor.click();
this.get('notify').success(this.get('l10n').t('Translations Zip generated successfully.'));
})
.catch(e => {
console.warn(e);
this.get('notify').error(this.get('l10n').t('Unexpected error occurred.'));
})
.finally(() => {
this.set('isLoading', false);
});
}
}
});
| Refactor translation downloads according to downloadFile service | Refactor translation downloads according to downloadFile service
| JavaScript | apache-2.0 | CosmicCoder96/open-event-frontend,ritikamotwani/open-event-frontend,CosmicCoder96/open-event-frontend,ritikamotwani/open-event-frontend,ritikamotwani/open-event-frontend,CosmicCoder96/open-event-frontend | javascript | ## Code Before:
import Controller from '@ember/controller';
export default Controller.extend({
isLoading: false,
actions: {
translationsDownload() {
this.set('isLoading', true);
this.get('loader')
.downloadFile('/admin/content/translations/all')
.then(() => {
this.get('notify').success(this.get('l10n').t('Translations Zip generated successfully.'));
})
.catch(e => {
console.warn(e);
this.get('notify').error(this.get('l10n').t('Unexpected error occurred.'));
})
.finally(() => {
this.set('isLoading', false);
});
}
}
});
## Instruction:
Refactor translation downloads according to downloadFile service
## Code After:
import Controller from '@ember/controller';
export default Controller.extend({
isLoading: false,
actions: {
translationsDownload() {
this.set('isLoading', true);
this.get('loader')
.downloadFile('/admin/content/translations/all')
.then(res => {
const anchor = document.createElement('a');
anchor.style.display = 'none';
anchor.href = `data:text/plain;charset=utf-8,${encodeURIComponent(res)}`;
anchor.download = 'Translations.zip';
anchor.click();
this.get('notify').success(this.get('l10n').t('Translations Zip generated successfully.'));
})
.catch(e => {
console.warn(e);
this.get('notify').error(this.get('l10n').t('Unexpected error occurred.'));
})
.finally(() => {
this.set('isLoading', false);
});
}
}
});
| import Controller from '@ember/controller';
export default Controller.extend({
isLoading: false,
actions: {
translationsDownload() {
this.set('isLoading', true);
this.get('loader')
.downloadFile('/admin/content/translations/all')
- .then(() => {
? ^^
+ .then(res => {
? ^^^
+ const anchor = document.createElement('a');
+ anchor.style.display = 'none';
+ anchor.href = `data:text/plain;charset=utf-8,${encodeURIComponent(res)}`;
+ anchor.download = 'Translations.zip';
+ anchor.click();
this.get('notify').success(this.get('l10n').t('Translations Zip generated successfully.'));
})
.catch(e => {
console.warn(e);
this.get('notify').error(this.get('l10n').t('Unexpected error occurred.'));
})
.finally(() => {
this.set('isLoading', false);
});
}
}
}); | 7 | 0.304348 | 6 | 1 |
7a83fe91bbfa8e709a6fccea466c2ede5db2051c | bucket/resharper-clt.json | bucket/resharper-clt.json | {
"version": "2016.2",
"homepage": "https://www.jetbrains.com/resharper/download/index.html#section=resharper-clt",
"license": "https://www.jetbrains.com/resharper/buy/command_line_license.html",
"url": "https://download.jetbrains.com/resharper/JetBrains.ReSharper.CommandLineTools.2016.2.20160818.172304.zip",
"hash": "060cae3b5bb9000465f6e402e12da4c15e3c1339ad4bd6178b6e69d754ccc937",
"bin": ["dupfinder.exe", "inspectcode.exe"],
"checkver": {
"url": "https://www.jetbrains.com/help/resharper/Introduction__Whats_New.html",
"re": "<h2>ReSharper/ReSharper\\s+C\\+\\+\\s+((\\d+\\.*)+)</h2>"
}
}
| {
"version": "2016.3",
"homepage": "https://www.jetbrains.com/resharper/download/index.html#section=resharper-clt",
"license": "https://www.jetbrains.com/resharper/buy/command_line_license.html",
"url": "https://download.jetbrains.com/resharper/JetBrains.ReSharper.CommandLineTools.2016.3.20161223.160402.zip",
"hash": "280a785fa07b37c5b2acfc6a4fcb7e9d02a531649e65cc02c246f181ed015804",
"bin": ["dupfinder.exe", "inspectcode.exe"],
"checkver": {
"url": "https://www.jetbrains.com/help/resharper/Introduction__Whats_New.html",
"re": "<h2>ReSharper/ReSharper\\s+C\\+\\+\\s+((\\d+\\.*)+)</h2>"
}
}
| Update Resharper CLT to 2016.3.1 | Update Resharper CLT to 2016.3.1
| JSON | unlicense | kodybrown/scoop,toxeus/scoop,Congee/scoop,Cyianor/scoop,nikolasd/scoop,vinaynambiar/lukesampson-,r15ch13/scoop,yunspace/scoop,rodericktech/scoop,berwyn/scoop,rasa/scoop,reelsense/scoop,coonce/scoop,lukesampson/scoop,bpollack/scoop,vidarkongsli/scoop,nueko/scoop | json | ## Code Before:
{
"version": "2016.2",
"homepage": "https://www.jetbrains.com/resharper/download/index.html#section=resharper-clt",
"license": "https://www.jetbrains.com/resharper/buy/command_line_license.html",
"url": "https://download.jetbrains.com/resharper/JetBrains.ReSharper.CommandLineTools.2016.2.20160818.172304.zip",
"hash": "060cae3b5bb9000465f6e402e12da4c15e3c1339ad4bd6178b6e69d754ccc937",
"bin": ["dupfinder.exe", "inspectcode.exe"],
"checkver": {
"url": "https://www.jetbrains.com/help/resharper/Introduction__Whats_New.html",
"re": "<h2>ReSharper/ReSharper\\s+C\\+\\+\\s+((\\d+\\.*)+)</h2>"
}
}
## Instruction:
Update Resharper CLT to 2016.3.1
## Code After:
{
"version": "2016.3",
"homepage": "https://www.jetbrains.com/resharper/download/index.html#section=resharper-clt",
"license": "https://www.jetbrains.com/resharper/buy/command_line_license.html",
"url": "https://download.jetbrains.com/resharper/JetBrains.ReSharper.CommandLineTools.2016.3.20161223.160402.zip",
"hash": "280a785fa07b37c5b2acfc6a4fcb7e9d02a531649e65cc02c246f181ed015804",
"bin": ["dupfinder.exe", "inspectcode.exe"],
"checkver": {
"url": "https://www.jetbrains.com/help/resharper/Introduction__Whats_New.html",
"re": "<h2>ReSharper/ReSharper\\s+C\\+\\+\\s+((\\d+\\.*)+)</h2>"
}
}
| {
- "version": "2016.2",
? ^
+ "version": "2016.3",
? ^
"homepage": "https://www.jetbrains.com/resharper/download/index.html#section=resharper-clt",
"license": "https://www.jetbrains.com/resharper/buy/command_line_license.html",
- "url": "https://download.jetbrains.com/resharper/JetBrains.ReSharper.CommandLineTools.2016.2.20160818.172304.zip",
? ^ -- ^ ^^^
+ "url": "https://download.jetbrains.com/resharper/JetBrains.ReSharper.CommandLineTools.2016.3.20161223.160402.zip",
? ^ ^^^ ^ ++
- "hash": "060cae3b5bb9000465f6e402e12da4c15e3c1339ad4bd6178b6e69d754ccc937",
+ "hash": "280a785fa07b37c5b2acfc6a4fcb7e9d02a531649e65cc02c246f181ed015804",
"bin": ["dupfinder.exe", "inspectcode.exe"],
"checkver": {
"url": "https://www.jetbrains.com/help/resharper/Introduction__Whats_New.html",
"re": "<h2>ReSharper/ReSharper\\s+C\\+\\+\\s+((\\d+\\.*)+)</h2>"
}
} | 6 | 0.5 | 3 | 3 |
79d887d03dc47a791e25b51b6ccc2015955f3920 | spec/absorb_spec.rb | spec/absorb_spec.rb | require_relative 'spec_helper'
describe Absorb do
['test.txt', 'test2.txt'].each do |file|
describe "file" do
it "should store #{file} in s3" do
Absorb::AmazonS3.expects(:store_file).with file
Absorb.file file
end
end
end
end
| require_relative 'spec_helper'
describe Absorb do
end
| Remove failing tests, will bring them back in a bit. | Remove failing tests, will bring them back in a bit. | Ruby | mit | darrencauthon/absorb,darrencauthon/absorb,darrencauthon/absorb | ruby | ## Code Before:
require_relative 'spec_helper'
describe Absorb do
['test.txt', 'test2.txt'].each do |file|
describe "file" do
it "should store #{file} in s3" do
Absorb::AmazonS3.expects(:store_file).with file
Absorb.file file
end
end
end
end
## Instruction:
Remove failing tests, will bring them back in a bit.
## Code After:
require_relative 'spec_helper'
describe Absorb do
end
| require_relative 'spec_helper'
describe Absorb do
-
- ['test.txt', 'test2.txt'].each do |file|
-
- describe "file" do
-
- it "should store #{file} in s3" do
- Absorb::AmazonS3.expects(:store_file).with file
- Absorb.file file
- end
-
- end
-
- end
-
end | 14 | 0.777778 | 0 | 14 |
46cc4b1c05a0b5da1a0e3b7c5eda871680410100 | es2015/build-topic-element.js | es2015/build-topic-element.js | "use strict";
function buildTopicElement(topic){
let title = "<h2>" + topic.title + "</h2>";
let author = "<small>" + topic.author + "</small>";
let body = "<p>" + topic.body + "</p>";
return { title: title, author: author, body: body };
} | "use strict";
function buildTopicElement(topic){
let title = "<h2>" + topic.title + "</h2>";
let author = "<small>" + topic.author + "</small>";
let body = "<p>" + topic.body + "</p>";
return { title, author, body };
} | Refactor the code to use the object initializer shorthand | Refactor the code to use the object initializer shorthand
| JavaScript | mit | var-bin/reactjs-training,var-bin/reactjs-training,var-bin/reactjs-training | javascript | ## Code Before:
"use strict";
function buildTopicElement(topic){
let title = "<h2>" + topic.title + "</h2>";
let author = "<small>" + topic.author + "</small>";
let body = "<p>" + topic.body + "</p>";
return { title: title, author: author, body: body };
}
## Instruction:
Refactor the code to use the object initializer shorthand
## Code After:
"use strict";
function buildTopicElement(topic){
let title = "<h2>" + topic.title + "</h2>";
let author = "<small>" + topic.author + "</small>";
let body = "<p>" + topic.body + "</p>";
return { title, author, body };
} | "use strict";
function buildTopicElement(topic){
let title = "<h2>" + topic.title + "</h2>";
let author = "<small>" + topic.author + "</small>";
let body = "<p>" + topic.body + "</p>";
- return { title: title, author: author, body: body };
? ------- -------- ------
+ return { title, author, body };
} | 2 | 0.222222 | 1 | 1 |
b1466ccd4a3820749fdb2814f10773bc82947558 | metadata/com.github.gschwind.fiddle_assistant.yml | metadata/com.github.gschwind.fiddle_assistant.yml | Categories:
- Science & Education
License: GPL-3.0-or-later
AuthorName: Benoit Gschwind
AuthorEmail: gschwind@gnu-log.net
SourceCode: https://github.com/gschwind/fiddle-assistant
IssueTracker: https://github.com/gschwind/fiddle-assistant/issues
AutoName: Fiddle Assistant
Description: |
Music tuner with a logging feature that help to check if the user play in tune.
RepoType: git
Repo: https://github.com/gschwind/fiddle-assistant.git
Builds:
- versionName: 1.0.0
versionCode: 100
commit: 1.0.0
subdir: app
gradle:
- yes
ndk: r21d
AutoUpdateMode: Version %v
UpdateCheckMode: Tags \d+\.\d+(\.\d+)?
CurrentVersion: 1.0.0
CurrentVersionCode: 100
| Categories:
- Science & Education
License: GPL-3.0-or-later
AuthorName: Benoit Gschwind
AuthorEmail: gschwind@gnu-log.net
SourceCode: https://github.com/gschwind/fiddle-assistant
IssueTracker: https://github.com/gschwind/fiddle-assistant/issues
AutoName: Fiddle Assistant
Description: |
Music tuner with a logging feature that help to check if the user play in tune.
RepoType: git
Repo: https://github.com/gschwind/fiddle-assistant.git
Builds:
- versionName: 1.0.0
versionCode: 100
commit: 1.0.0
subdir: app
gradle:
- yes
ndk: r21d
- versionName: 1.1.0
versionCode: 110
commit: 1.1.0
subdir: app
gradle:
- yes
ndk: r21d
AutoUpdateMode: Version %v
UpdateCheckMode: Tags \d+\.\d+(\.\d+)?
CurrentVersion: 1.1.0
CurrentVersionCode: 110
| Update Fiddle Assistant to 1.1.0 (110) | Update Fiddle Assistant to 1.1.0 (110)
| YAML | agpl-3.0 | f-droid/fdroiddata,f-droid/fdroiddata | yaml | ## Code Before:
Categories:
- Science & Education
License: GPL-3.0-or-later
AuthorName: Benoit Gschwind
AuthorEmail: gschwind@gnu-log.net
SourceCode: https://github.com/gschwind/fiddle-assistant
IssueTracker: https://github.com/gschwind/fiddle-assistant/issues
AutoName: Fiddle Assistant
Description: |
Music tuner with a logging feature that help to check if the user play in tune.
RepoType: git
Repo: https://github.com/gschwind/fiddle-assistant.git
Builds:
- versionName: 1.0.0
versionCode: 100
commit: 1.0.0
subdir: app
gradle:
- yes
ndk: r21d
AutoUpdateMode: Version %v
UpdateCheckMode: Tags \d+\.\d+(\.\d+)?
CurrentVersion: 1.0.0
CurrentVersionCode: 100
## Instruction:
Update Fiddle Assistant to 1.1.0 (110)
## Code After:
Categories:
- Science & Education
License: GPL-3.0-or-later
AuthorName: Benoit Gschwind
AuthorEmail: gschwind@gnu-log.net
SourceCode: https://github.com/gschwind/fiddle-assistant
IssueTracker: https://github.com/gschwind/fiddle-assistant/issues
AutoName: Fiddle Assistant
Description: |
Music tuner with a logging feature that help to check if the user play in tune.
RepoType: git
Repo: https://github.com/gschwind/fiddle-assistant.git
Builds:
- versionName: 1.0.0
versionCode: 100
commit: 1.0.0
subdir: app
gradle:
- yes
ndk: r21d
- versionName: 1.1.0
versionCode: 110
commit: 1.1.0
subdir: app
gradle:
- yes
ndk: r21d
AutoUpdateMode: Version %v
UpdateCheckMode: Tags \d+\.\d+(\.\d+)?
CurrentVersion: 1.1.0
CurrentVersionCode: 110
| Categories:
- Science & Education
License: GPL-3.0-or-later
AuthorName: Benoit Gschwind
AuthorEmail: gschwind@gnu-log.net
SourceCode: https://github.com/gschwind/fiddle-assistant
IssueTracker: https://github.com/gschwind/fiddle-assistant/issues
AutoName: Fiddle Assistant
Description: |
Music tuner with a logging feature that help to check if the user play in tune.
RepoType: git
Repo: https://github.com/gschwind/fiddle-assistant.git
Builds:
- versionName: 1.0.0
versionCode: 100
commit: 1.0.0
subdir: app
gradle:
- yes
ndk: r21d
+ - versionName: 1.1.0
+ versionCode: 110
+ commit: 1.1.0
+ subdir: app
+ gradle:
+ - yes
+ ndk: r21d
+
AutoUpdateMode: Version %v
UpdateCheckMode: Tags \d+\.\d+(\.\d+)?
- CurrentVersion: 1.0.0
? ^
+ CurrentVersion: 1.1.0
? ^
- CurrentVersionCode: 100
? -
+ CurrentVersionCode: 110
? +
| 12 | 0.428571 | 10 | 2 |
b989262d9912c6c9df2a32d561dcc8e554704dfc | 2016/day4/day4.go | 2016/day4/day4.go | package day4
import (
"strings"
"github.com/domdavis/adventofcode/2016/day4/room"
)
func Solution() string {
return "Not yet solved"
}
func sum(input string) int {
total := 0
for _, code := range strings.Split(input, "\n") {
room := room.New(code)
if room.Real {
total += room.Sector
}
}
return total
}
| package day4
import (
"strings"
"strconv"
"github.com/domdavis/adventofcode/2016/day4/room"
)
func Solution() string {
return strconv.Itoa(sum(data))
}
func sum(input string) int {
total := 0
for _, code := range strings.Split(input, "\n") {
room := room.New(code)
if room.Real {
total += room.Sector
}
}
return total
}
| Add part 1 of day 4 test | Add part 1 of day 4 test
| Go | mit | domdavis/adventofcode | go | ## Code Before:
package day4
import (
"strings"
"github.com/domdavis/adventofcode/2016/day4/room"
)
func Solution() string {
return "Not yet solved"
}
func sum(input string) int {
total := 0
for _, code := range strings.Split(input, "\n") {
room := room.New(code)
if room.Real {
total += room.Sector
}
}
return total
}
## Instruction:
Add part 1 of day 4 test
## Code After:
package day4
import (
"strings"
"strconv"
"github.com/domdavis/adventofcode/2016/day4/room"
)
func Solution() string {
return strconv.Itoa(sum(data))
}
func sum(input string) int {
total := 0
for _, code := range strings.Split(input, "\n") {
room := room.New(code)
if room.Real {
total += room.Sector
}
}
return total
}
| package day4
import (
"strings"
+ "strconv"
+
"github.com/domdavis/adventofcode/2016/day4/room"
)
func Solution() string {
- return "Not yet solved"
+ return strconv.Itoa(sum(data))
}
func sum(input string) int {
total := 0
for _, code := range strings.Split(input, "\n") {
room := room.New(code)
if room.Real {
total += room.Sector
}
}
return total
} | 4 | 0.166667 | 3 | 1 |
91adca72daf8d913b4cb6ca6e3454a9f9b0ddf61 | README.md | README.md |
A uxebu workshop taking place the first time July 6th+7th 2015 in Munich
http://www.uxebu.com/all-workshops/es6-and-react-js/
| [](https://travis-ci.org/wolframkriesing/es6-react-workshop)
# ES6 in use with react.js
A uxebu workshop taking place the first time July 6th+7th 2015 in Munich
http://www.uxebu.com/all-workshops/es6-and-react-js/
| Add travis badge, so on the repo page the status is visible right away. | Add travis badge, so on the repo page the status is visible right away. | Markdown | mit | wolframkriesing/es6-react-workshop,wolframkriesing/es6-react-workshop | markdown | ## Code Before:
A uxebu workshop taking place the first time July 6th+7th 2015 in Munich
http://www.uxebu.com/all-workshops/es6-and-react-js/
## Instruction:
Add travis badge, so on the repo page the status is visible right away.
## Code After:
[](https://travis-ci.org/wolframkriesing/es6-react-workshop)
# ES6 in use with react.js
A uxebu workshop taking place the first time July 6th+7th 2015 in Munich
http://www.uxebu.com/all-workshops/es6-and-react-js/
| + [](https://travis-ci.org/wolframkriesing/es6-react-workshop)
+
+ # ES6 in use with react.js
A uxebu workshop taking place the first time July 6th+7th 2015 in Munich
http://www.uxebu.com/all-workshops/es6-and-react-js/ | 3 | 1 | 3 | 0 |
4fbd869fb910a068e2a6ee8505b41334ce8a5e40 | POC/completion.js | POC/completion.js | const prompt = require('prompt')
const sha1 = require('sha1')
prompt.start()
prompt.get(['username', 'level'], function(err, result) {
if(err) throw new Error(err)
let sha1text = sha1(result['username'] + result['level'])
const getDigits = new RegExp(/\d/, 'g');
let str = ""
for(let i = 0; i < 4; i++) {
str += getDigits.exec(sha1text)
}
console.log(str)
return str
})
// 3935
| const prompt = require('prompt')
const sha1 = require('sha1')
prompt.start()
// Am i overdoing this?
prompt.get(['username', 'level'], function(err, result) {
if(err) throw new Error(err)
let usernameEncrypt = ""
const reId = new RegExp(/(.{4})(.{4})/)
for(let i = 0; i < 4; i++) {
let match = reId.exec(result['username'])
usernameEncrypt += match[1][i] + match[2][i]
}
console.log(usernameEncrypt)
let sha1text = sha1(usernameEncrypt + result['level'])
let output = ""
const getDigits = new RegExp(/\d/, 'g');
for(let i = 0; i < 4; i++) {
output += getDigits.exec(sha1text)
}
console.log(output)
return output
})
// 3935
| Make it harder t (longwinded) to crack for the smart kids | Make it harder t (longwinded) to crack for the smart kids
| JavaScript | mit | saturninoharris/primer-design,saturninoharris/primer-design | javascript | ## Code Before:
const prompt = require('prompt')
const sha1 = require('sha1')
prompt.start()
prompt.get(['username', 'level'], function(err, result) {
if(err) throw new Error(err)
let sha1text = sha1(result['username'] + result['level'])
const getDigits = new RegExp(/\d/, 'g');
let str = ""
for(let i = 0; i < 4; i++) {
str += getDigits.exec(sha1text)
}
console.log(str)
return str
})
// 3935
## Instruction:
Make it harder t (longwinded) to crack for the smart kids
## Code After:
const prompt = require('prompt')
const sha1 = require('sha1')
prompt.start()
// Am i overdoing this?
prompt.get(['username', 'level'], function(err, result) {
if(err) throw new Error(err)
let usernameEncrypt = ""
const reId = new RegExp(/(.{4})(.{4})/)
for(let i = 0; i < 4; i++) {
let match = reId.exec(result['username'])
usernameEncrypt += match[1][i] + match[2][i]
}
console.log(usernameEncrypt)
let sha1text = sha1(usernameEncrypt + result['level'])
let output = ""
const getDigits = new RegExp(/\d/, 'g');
for(let i = 0; i < 4; i++) {
output += getDigits.exec(sha1text)
}
console.log(output)
return output
})
// 3935
| const prompt = require('prompt')
const sha1 = require('sha1')
prompt.start()
-
+ // Am i overdoing this?
prompt.get(['username', 'level'], function(err, result) {
if(err) throw new Error(err)
+ let usernameEncrypt = ""
+ const reId = new RegExp(/(.{4})(.{4})/)
+ for(let i = 0; i < 4; i++) {
+ let match = reId.exec(result['username'])
+ usernameEncrypt += match[1][i] + match[2][i]
+ }
+ console.log(usernameEncrypt)
- let sha1text = sha1(result['username'] + result['level'])
? -------- ^^
+ let sha1text = sha1(usernameEncrypt + result['level'])
? ^^^^^^^
+ let output = ""
const getDigits = new RegExp(/\d/, 'g');
- let str = ""
for(let i = 0; i < 4; i++) {
- str += getDigits.exec(sha1text)
? ^ ^
+ output += getDigits.exec(sha1text)
? ^^ ^^^
}
- console.log(str)
? ^ ^
+ console.log(output)
? ^^ ^^^
- return str
+ return output
})
// 3935 | 19 | 1.055556 | 13 | 6 |
860efa55c3a06d2dfc0ec96f50f1a5755e173d67 | news.md | news.md | ---
layout: default
---
# News
What's up?
### Semana Nacional de Ciencia e Tecnologia
O grupo de Computacao Musical do IME-USP apresentara alguns de seus projetos entre os dias 23 e 28 de outubro no Centro de Difusao Internacional da USP. Mais informacoes em [http://usp.br/semanact/2017/](http://usp.br/semanact/2017/).
[back](./) | ---
layout: default
---
# News
What's up?
## First installation of Arch Linux
Yesterday I began my exploration into Linux Kernel. My first task was install a virtualisation tool: Qemu. Then, I create a image for Arch Linux.
I followed these tutorials:
- Intall Qemu and start the image for Arch Linux: https://oslongjourney.github.io/linux-kernel/use-qemu-to-play-with-linux/
- Arch Linux Instalation: https://itsfoss.com/install-arch-linux/
- SSH configuration: https://wiki.archlinux.org/index.php/Secure_Shell
I do not want to work inside the Qemu, so I defined my root to be accessible by ssh. Since my machine is only for tests, it is not a problem.
One very important tip: _MAKE A BACKUP!_
I already did mine! And now I am looking forward to start learning about how to compile and modify Linux Kernel!
## Semana Nacional de Ciencia e Tecnologia
O grupo de Computacao Musical do IME-USP apresentara alguns de seus projetos entre os dias 23 e 28 de outubro no Centro de Difusao Internacional da USP. Mais informacoes em [http://usp.br/semanact/2017/](http://usp.br/semanact/2017/).
[back](./)
| Add my first installation of Arch Linux | Add my first installation of Arch Linux | Markdown | cc0-1.0 | shayenne/shayenne.github.io,shayenne/shayenne.github.io,shayenne/shayenne.github.io | markdown | ## Code Before:
---
layout: default
---
# News
What's up?
### Semana Nacional de Ciencia e Tecnologia
O grupo de Computacao Musical do IME-USP apresentara alguns de seus projetos entre os dias 23 e 28 de outubro no Centro de Difusao Internacional da USP. Mais informacoes em [http://usp.br/semanact/2017/](http://usp.br/semanact/2017/).
[back](./)
## Instruction:
Add my first installation of Arch Linux
## Code After:
---
layout: default
---
# News
What's up?
## First installation of Arch Linux
Yesterday I began my exploration into Linux Kernel. My first task was install a virtualisation tool: Qemu. Then, I create a image for Arch Linux.
I followed these tutorials:
- Intall Qemu and start the image for Arch Linux: https://oslongjourney.github.io/linux-kernel/use-qemu-to-play-with-linux/
- Arch Linux Instalation: https://itsfoss.com/install-arch-linux/
- SSH configuration: https://wiki.archlinux.org/index.php/Secure_Shell
I do not want to work inside the Qemu, so I defined my root to be accessible by ssh. Since my machine is only for tests, it is not a problem.
One very important tip: _MAKE A BACKUP!_
I already did mine! And now I am looking forward to start learning about how to compile and modify Linux Kernel!
## Semana Nacional de Ciencia e Tecnologia
O grupo de Computacao Musical do IME-USP apresentara alguns de seus projetos entre os dias 23 e 28 de outubro no Centro de Difusao Internacional da USP. Mais informacoes em [http://usp.br/semanact/2017/](http://usp.br/semanact/2017/).
[back](./)
| ---
layout: default
---
# News
What's up?
+ ## First installation of Arch Linux
+
+ Yesterday I began my exploration into Linux Kernel. My first task was install a virtualisation tool: Qemu. Then, I create a image for Arch Linux.
+
+ I followed these tutorials:
+
+ - Intall Qemu and start the image for Arch Linux: https://oslongjourney.github.io/linux-kernel/use-qemu-to-play-with-linux/
+ - Arch Linux Instalation: https://itsfoss.com/install-arch-linux/
+ - SSH configuration: https://wiki.archlinux.org/index.php/Secure_Shell
+
+ I do not want to work inside the Qemu, so I defined my root to be accessible by ssh. Since my machine is only for tests, it is not a problem.
+
+ One very important tip: _MAKE A BACKUP!_
+
+ I already did mine! And now I am looking forward to start learning about how to compile and modify Linux Kernel!
+
- ### Semana Nacional de Ciencia e Tecnologia
? -
+ ## Semana Nacional de Ciencia e Tecnologia
O grupo de Computacao Musical do IME-USP apresentara alguns de seus projetos entre os dias 23 e 28 de outubro no Centro de Difusao Internacional da USP. Mais informacoes em [http://usp.br/semanact/2017/](http://usp.br/semanact/2017/).
[back](./) | 18 | 1.384615 | 17 | 1 |
edb059591d564bee66f1251549d6cb582f7cdc4c | app/models/gobierto_people.rb | app/models/gobierto_people.rb | module GobiertoPeople
def self.table_name_prefix
"gp_"
end
def self.classes_with_vocabularies
[GobiertoPeople::Person]
end
def self.searchable_models
[ GobiertoPeople::Person, GobiertoPeople::PersonPost, GobiertoPeople::PersonStatement ]
end
def self.module_submodules
%w(officials agendas blogs statements departments interest_groups trips gifts invitations)
end
def self.custom_engine_resources
%w(events invitations gifts trips)
end
def self.remote_calendar_integrations
%w( ibm_notes google_calendar microsoft_exchange )
end
end
| module GobiertoPeople
def self.table_name_prefix
"gp_"
end
def self.classes_with_vocabularies
[GobiertoPeople::Person]
end
def self.classes_with_custom_fields
[GobiertoPeople::Person]
end
def self.searchable_models
[ GobiertoPeople::Person, GobiertoPeople::PersonPost, GobiertoPeople::PersonStatement ]
end
def self.module_submodules
%w(officials agendas blogs statements departments interest_groups trips gifts invitations)
end
def self.custom_engine_resources
%w(events invitations gifts trips)
end
def self.remote_calendar_integrations
%w( ibm_notes google_calendar microsoft_exchange )
end
end
| Add GobiertoPeople::Person to classes with custom fields enabled | Add GobiertoPeople::Person to classes with custom fields enabled
| Ruby | agpl-3.0 | PopulateTools/gobierto,PopulateTools/gobierto,PopulateTools/gobierto-dev,PopulateTools/gobierto,PopulateTools/gobierto-dev,PopulateTools/gobierto-dev,PopulateTools/gobierto,PopulateTools/gobierto-dev | ruby | ## Code Before:
module GobiertoPeople
def self.table_name_prefix
"gp_"
end
def self.classes_with_vocabularies
[GobiertoPeople::Person]
end
def self.searchable_models
[ GobiertoPeople::Person, GobiertoPeople::PersonPost, GobiertoPeople::PersonStatement ]
end
def self.module_submodules
%w(officials agendas blogs statements departments interest_groups trips gifts invitations)
end
def self.custom_engine_resources
%w(events invitations gifts trips)
end
def self.remote_calendar_integrations
%w( ibm_notes google_calendar microsoft_exchange )
end
end
## Instruction:
Add GobiertoPeople::Person to classes with custom fields enabled
## Code After:
module GobiertoPeople
def self.table_name_prefix
"gp_"
end
def self.classes_with_vocabularies
[GobiertoPeople::Person]
end
def self.classes_with_custom_fields
[GobiertoPeople::Person]
end
def self.searchable_models
[ GobiertoPeople::Person, GobiertoPeople::PersonPost, GobiertoPeople::PersonStatement ]
end
def self.module_submodules
%w(officials agendas blogs statements departments interest_groups trips gifts invitations)
end
def self.custom_engine_resources
%w(events invitations gifts trips)
end
def self.remote_calendar_integrations
%w( ibm_notes google_calendar microsoft_exchange )
end
end
| module GobiertoPeople
def self.table_name_prefix
"gp_"
end
def self.classes_with_vocabularies
+ [GobiertoPeople::Person]
+ end
+
+ def self.classes_with_custom_fields
[GobiertoPeople::Person]
end
def self.searchable_models
[ GobiertoPeople::Person, GobiertoPeople::PersonPost, GobiertoPeople::PersonStatement ]
end
def self.module_submodules
%w(officials agendas blogs statements departments interest_groups trips gifts invitations)
end
def self.custom_engine_resources
%w(events invitations gifts trips)
end
def self.remote_calendar_integrations
%w( ibm_notes google_calendar microsoft_exchange )
end
end | 4 | 0.16 | 4 | 0 |
59cc454244dcd86943d896db17c544254ba1e1c9 | History.markdown | History.markdown |
* Force sitemap layout to be `nil` (#16)
|
* Force sitemap layout to be `nil` (#16)
## 0.4.0 / 2014-05-06
### Major Enhancements
* Support Jekyll 2.0 (#12)
## 0.3.0 / 2014-05-05
### Minor Enhancements
* Generate sitemap using html_pages (#10)
### Bug Fixes
* Remove stray sitemap.xsl from template (#8)
### Development Fixes
* Added travis (#6)
* Better timezone support (#7)
## 0.2.0 / 2014-03-24
* Loosen Jekyll requirement (#4)
## 0.1.0 / 2014-03-15
* Birthday!
| Add history up 'til now. | Add history up 'til now.
| Markdown | mit | Bootstragram/jekyll-sitemap,Bootstragram/jekyll-sitemap,jekyll/jekyll-sitemap,kevinoid/jekyll-sitemap,Bootstragram/jekyll-sitemap,jekyll/jekyll-sitemap,kevinoid/jekyll-sitemap | markdown | ## Code Before:
* Force sitemap layout to be `nil` (#16)
## Instruction:
Add history up 'til now.
## Code After:
* Force sitemap layout to be `nil` (#16)
## 0.4.0 / 2014-05-06
### Major Enhancements
* Support Jekyll 2.0 (#12)
## 0.3.0 / 2014-05-05
### Minor Enhancements
* Generate sitemap using html_pages (#10)
### Bug Fixes
* Remove stray sitemap.xsl from template (#8)
### Development Fixes
* Added travis (#6)
* Better timezone support (#7)
## 0.2.0 / 2014-03-24
* Loosen Jekyll requirement (#4)
## 0.1.0 / 2014-03-15
* Birthday!
|
* Force sitemap layout to be `nil` (#16)
+
+ ## 0.4.0 / 2014-05-06
+
+ ### Major Enhancements
+
+ * Support Jekyll 2.0 (#12)
+
+ ## 0.3.0 / 2014-05-05
+
+ ### Minor Enhancements
+
+ * Generate sitemap using html_pages (#10)
+
+ ### Bug Fixes
+
+ * Remove stray sitemap.xsl from template (#8)
+
+ ### Development Fixes
+
+ * Added travis (#6)
+ * Better timezone support (#7)
+
+ ## 0.2.0 / 2014-03-24
+
+ * Loosen Jekyll requirement (#4)
+
+ ## 0.1.0 / 2014-03-15
+
+ * Birthday! | 29 | 14.5 | 29 | 0 |
3aa71d1f0f3570dd513bfade79a15061afd9039d | README.md | README.md |
This is a minor-mode for Emacs for editing Python docstrings. It provides
syntax highlighting for docstrings in both ReStructuredText and Epydoc formats,
as well as an override for the fill-paragraph function when editing such a
docstring that will wrap things according to Python community convention.
|
[](https://travis-ci.org/glyph/python-docstring-mode)
This is a minor-mode for Emacs for editing Python docstrings. It provides
syntax highlighting for docstrings in both reStructuredText and Epydoc formats,
as well as an override for the fill-paragraph function when editing such a
docstring that will wrap things according to Python community convention.
| Make it worth it writing tests | Make it worth it writing tests
Also fix spelling of reST.
| Markdown | mit | glyph/python-docstring-mode,emacsmirror/python-docstring | markdown | ## Code Before:
This is a minor-mode for Emacs for editing Python docstrings. It provides
syntax highlighting for docstrings in both ReStructuredText and Epydoc formats,
as well as an override for the fill-paragraph function when editing such a
docstring that will wrap things according to Python community convention.
## Instruction:
Make it worth it writing tests
Also fix spelling of reST.
## Code After:
[](https://travis-ci.org/glyph/python-docstring-mode)
This is a minor-mode for Emacs for editing Python docstrings. It provides
syntax highlighting for docstrings in both reStructuredText and Epydoc formats,
as well as an override for the fill-paragraph function when editing such a
docstring that will wrap things according to Python community convention.
| +
+ [](https://travis-ci.org/glyph/python-docstring-mode)
This is a minor-mode for Emacs for editing Python docstrings. It provides
- syntax highlighting for docstrings in both ReStructuredText and Epydoc formats,
? ^
+ syntax highlighting for docstrings in both reStructuredText and Epydoc formats,
? ^
as well as an override for the fill-paragraph function when editing such a
docstring that will wrap things according to Python community convention. | 4 | 0.8 | 3 | 1 |
bd6a64a57f85933059231789ed6ec117bd1089da | ADLivelyCollectionView.h | ADLivelyCollectionView.h | //
// ADLivelyCollectionView.h
// ADLivelyCollectionView
//
// Created by Romain Goyet on 18/04/12.
// Copyright (c) 2012 Applidium. All rights reserved.
//
#import <UIKit/UIKit.h>
extern NSTimeInterval ADLivelyDefaultDuration;
typedef NSTimeInterval (^ADLivelyTransform)(CALayer * layer, float speed);
extern ADLivelyTransform ADLivelyTransformCurl;
extern ADLivelyTransform ADLivelyTransformFade;
extern ADLivelyTransform ADLivelyTransformFan;
extern ADLivelyTransform ADLivelyTransformFlip;
extern ADLivelyTransform ADLivelyTransformHelix;
extern ADLivelyTransform ADLivelyTransformTilt;
extern ADLivelyTransform ADLivelyTransformWave;
extern ADLivelyTransform ADLivelyTransformGrow;
@interface ADLivelyCollectionView : UICollectionView <UICollectionViewDelegate, UICollectionViewDataSource> {
id <UICollectionViewDelegate> _preLivelyDelegate;
id <UICollectionViewDataSource> _preLivelyDataSource;
CGPoint _lastScrollPosition;
CGPoint _currentScrollPosition;
ADLivelyTransform _transformBlock;
}
- (CGPoint)scrollSpeed;
- (void)setInitialCellTransformBlock:(ADLivelyTransform)block;
@property (nonatomic, assign) CGFloat speedThreshold;
@end
| //
// ADLivelyCollectionView.h
// ADLivelyCollectionView
//
// Created by Romain Goyet on 18/04/12.
// Copyright (c) 2012 Applidium. All rights reserved.
//
#import <UIKit/UIKit.h>
extern NSTimeInterval ADLivelyDefaultDuration;
typedef NSTimeInterval (^ADLivelyTransform)(CALayer * layer, float speed);
extern ADLivelyTransform ADLivelyTransformCurl;
extern ADLivelyTransform ADLivelyTransformFade;
extern ADLivelyTransform ADLivelyTransformFan;
extern ADLivelyTransform ADLivelyTransformFlip;
extern ADLivelyTransform ADLivelyTransformHelix;
extern ADLivelyTransform ADLivelyTransformTilt;
extern ADLivelyTransform ADLivelyTransformWave;
extern ADLivelyTransform ADLivelyTransformGrow;
@interface ADLivelyCollectionView : UICollectionView <UICollectionViewDelegate, UICollectionViewDataSource> {
id <UICollectionViewDelegate> _preLivelyDelegate;
id <UICollectionViewDataSource> _preLivelyDataSource;
CGPoint _lastScrollPosition;
CGPoint _currentScrollPosition;
ADLivelyTransform _transformBlock;
}
- (CGPoint)scrollSpeed;
- (void)setInitialCellTransformBlock:(ADLivelyTransform)block;
@property (nonatomic, assign) CGFloat speedThreshold; // optional, disables animations when exceeding this speed
@end
| Add comment for the speed threshold | Add comment for the speed threshold
| C | bsd-3-clause | applidium/ADLivelyCollectionView | c | ## Code Before:
//
// ADLivelyCollectionView.h
// ADLivelyCollectionView
//
// Created by Romain Goyet on 18/04/12.
// Copyright (c) 2012 Applidium. All rights reserved.
//
#import <UIKit/UIKit.h>
extern NSTimeInterval ADLivelyDefaultDuration;
typedef NSTimeInterval (^ADLivelyTransform)(CALayer * layer, float speed);
extern ADLivelyTransform ADLivelyTransformCurl;
extern ADLivelyTransform ADLivelyTransformFade;
extern ADLivelyTransform ADLivelyTransformFan;
extern ADLivelyTransform ADLivelyTransformFlip;
extern ADLivelyTransform ADLivelyTransformHelix;
extern ADLivelyTransform ADLivelyTransformTilt;
extern ADLivelyTransform ADLivelyTransformWave;
extern ADLivelyTransform ADLivelyTransformGrow;
@interface ADLivelyCollectionView : UICollectionView <UICollectionViewDelegate, UICollectionViewDataSource> {
id <UICollectionViewDelegate> _preLivelyDelegate;
id <UICollectionViewDataSource> _preLivelyDataSource;
CGPoint _lastScrollPosition;
CGPoint _currentScrollPosition;
ADLivelyTransform _transformBlock;
}
- (CGPoint)scrollSpeed;
- (void)setInitialCellTransformBlock:(ADLivelyTransform)block;
@property (nonatomic, assign) CGFloat speedThreshold;
@end
## Instruction:
Add comment for the speed threshold
## Code After:
//
// ADLivelyCollectionView.h
// ADLivelyCollectionView
//
// Created by Romain Goyet on 18/04/12.
// Copyright (c) 2012 Applidium. All rights reserved.
//
#import <UIKit/UIKit.h>
extern NSTimeInterval ADLivelyDefaultDuration;
typedef NSTimeInterval (^ADLivelyTransform)(CALayer * layer, float speed);
extern ADLivelyTransform ADLivelyTransformCurl;
extern ADLivelyTransform ADLivelyTransformFade;
extern ADLivelyTransform ADLivelyTransformFan;
extern ADLivelyTransform ADLivelyTransformFlip;
extern ADLivelyTransform ADLivelyTransformHelix;
extern ADLivelyTransform ADLivelyTransformTilt;
extern ADLivelyTransform ADLivelyTransformWave;
extern ADLivelyTransform ADLivelyTransformGrow;
@interface ADLivelyCollectionView : UICollectionView <UICollectionViewDelegate, UICollectionViewDataSource> {
id <UICollectionViewDelegate> _preLivelyDelegate;
id <UICollectionViewDataSource> _preLivelyDataSource;
CGPoint _lastScrollPosition;
CGPoint _currentScrollPosition;
ADLivelyTransform _transformBlock;
}
- (CGPoint)scrollSpeed;
- (void)setInitialCellTransformBlock:(ADLivelyTransform)block;
@property (nonatomic, assign) CGFloat speedThreshold; // optional, disables animations when exceeding this speed
@end
| //
// ADLivelyCollectionView.h
// ADLivelyCollectionView
//
// Created by Romain Goyet on 18/04/12.
// Copyright (c) 2012 Applidium. All rights reserved.
//
#import <UIKit/UIKit.h>
extern NSTimeInterval ADLivelyDefaultDuration;
typedef NSTimeInterval (^ADLivelyTransform)(CALayer * layer, float speed);
extern ADLivelyTransform ADLivelyTransformCurl;
extern ADLivelyTransform ADLivelyTransformFade;
extern ADLivelyTransform ADLivelyTransformFan;
extern ADLivelyTransform ADLivelyTransformFlip;
extern ADLivelyTransform ADLivelyTransformHelix;
extern ADLivelyTransform ADLivelyTransformTilt;
extern ADLivelyTransform ADLivelyTransformWave;
extern ADLivelyTransform ADLivelyTransformGrow;
@interface ADLivelyCollectionView : UICollectionView <UICollectionViewDelegate, UICollectionViewDataSource> {
id <UICollectionViewDelegate> _preLivelyDelegate;
id <UICollectionViewDataSource> _preLivelyDataSource;
CGPoint _lastScrollPosition;
CGPoint _currentScrollPosition;
ADLivelyTransform _transformBlock;
}
- (CGPoint)scrollSpeed;
- (void)setInitialCellTransformBlock:(ADLivelyTransform)block;
- @property (nonatomic, assign) CGFloat speedThreshold;
+ @property (nonatomic, assign) CGFloat speedThreshold; // optional, disables animations when exceeding this speed
@end | 2 | 0.058824 | 1 | 1 |
1ea17c2e7e2ef26b8121afa98e069272d4897b5c | README.md | README.md |

Agon is a library for running dedicated game servers on [Kubernetes](https://kubernetes.io).
## Roadmap for 0.1 release
- Develop a [Custom Resource Defintion](https://kubernetes.io/docs/concepts/api-extension/custom-resources/#customresourcedefinitions) for dedicated game server
- Sidecar for managing the DGS lifecycle and recorded status, e.g. registering the port the server has started on
- A Kubernetes operator that registers the CRD, and creates a Pod with the DGS in it, with the accompanying sidecar for system registration.
- A basic client library for integration with a DGS
- Simple example code
- Documentation of the above
## Development
See the tools in the [build](build/README.md) directory for testing and building Agon.
## Licence
Apache 2.0 |
Agon is a library for running dedicated game servers on [Kubernetes](https://kubernetes.io).
## Roadmap for 0.1 release
- Develop a [Custom Resource Defintion](https://kubernetes.io/docs/concepts/api-extension/custom-resources/#customresourcedefinitions) for dedicated game server
- Sidecar for managing the DGS lifecycle and recorded status, e.g. registering the port the server has started on
- A Kubernetes operator that registers the CRD, and creates a Pod with the DGS in it, with the accompanying sidecar for system registration.
- A basic client library for integration with a DGS
- Simple example code
- Documentation of the above
## Development
See the tools in the [build](build/README.md) directory for testing and building Agon.
## Licence
Apache 2.0 | Remove broken image in the readme. | Remove broken image in the readme.
| Markdown | apache-2.0 | nicedone/agones,googleforgames/agones,googleforgames/agones,nicedone/agones,googleforgames/agones,googleforgames/agones,googleforgames/agones,googleforgames/agones,googleforgames/agones,nicedone/agones,nicedone/agones,googleforgames/agones | markdown | ## Code Before:

Agon is a library for running dedicated game servers on [Kubernetes](https://kubernetes.io).
## Roadmap for 0.1 release
- Develop a [Custom Resource Defintion](https://kubernetes.io/docs/concepts/api-extension/custom-resources/#customresourcedefinitions) for dedicated game server
- Sidecar for managing the DGS lifecycle and recorded status, e.g. registering the port the server has started on
- A Kubernetes operator that registers the CRD, and creates a Pod with the DGS in it, with the accompanying sidecar for system registration.
- A basic client library for integration with a DGS
- Simple example code
- Documentation of the above
## Development
See the tools in the [build](build/README.md) directory for testing and building Agon.
## Licence
Apache 2.0
## Instruction:
Remove broken image in the readme.
## Code After:
Agon is a library for running dedicated game servers on [Kubernetes](https://kubernetes.io).
## Roadmap for 0.1 release
- Develop a [Custom Resource Defintion](https://kubernetes.io/docs/concepts/api-extension/custom-resources/#customresourcedefinitions) for dedicated game server
- Sidecar for managing the DGS lifecycle and recorded status, e.g. registering the port the server has started on
- A Kubernetes operator that registers the CRD, and creates a Pod with the DGS in it, with the accompanying sidecar for system registration.
- A basic client library for integration with a DGS
- Simple example code
- Documentation of the above
## Development
See the tools in the [build](build/README.md) directory for testing and building Agon.
## Licence
Apache 2.0 | -
- 
Agon is a library for running dedicated game servers on [Kubernetes](https://kubernetes.io).
## Roadmap for 0.1 release
- Develop a [Custom Resource Defintion](https://kubernetes.io/docs/concepts/api-extension/custom-resources/#customresourcedefinitions) for dedicated game server
- Sidecar for managing the DGS lifecycle and recorded status, e.g. registering the port the server has started on
- A Kubernetes operator that registers the CRD, and creates a Pod with the DGS in it, with the accompanying sidecar for system registration.
- A basic client library for integration with a DGS
- Simple example code
- Documentation of the above
## Development
See the tools in the [build](build/README.md) directory for testing and building Agon.
## Licence
Apache 2.0 | 2 | 0.105263 | 0 | 2 |
34b37a4f477c4db54a4991790bc769cb5cfcc308 | __tests__/components/RemoveModal.spec.js | __tests__/components/RemoveModal.spec.js | import RemoveModal from '~/components/RemoveModal'
import BaseModal from '~/components/BaseModal'
import { mount } from 'helper'
describe('Remove modal component', () => {
let wrapper, baseModalWrapper
beforeEach(() => {
wrapper = mount(RemoveModal, {
stubs: {
post: true
}
})
baseModalWrapper = wrapper.find(BaseModal)
})
describe('Emitted show event from base-modal', () => {
beforeEach(() => {
wrapper.setMethods({
show: jest.fn()
})
const post = {}
baseModalWrapper.vm.$emit('show', post)
})
test('Called show()', () => {
expect(wrapper.vm.show).toHaveBeenCalled()
})
test('Shown the post', () => {
wrapper.vm.post = {}
expect(wrapper.find('post-stub').exists()).toBe(true)
})
})
describe('Emitted hidden event from base-modal', () => {
beforeEach(() => {
wrapper.setMethods({
hidden: jest.fn()
})
baseModalWrapper.vm.$emit('hidden')
})
test('Called hidden()', () => {
expect(wrapper.vm.hidden).toHaveBeenCalled()
})
test('The post is hidden', () => {
expect(wrapper.find('.list-group').exists()).toBe(false)
})
})
})
| import RemoveModal from '~/components/RemoveModal'
import BaseModal from '~/components/BaseModal'
import { shallowMount } from 'helper'
describe('Remove modal component', () => {
let wrapper, baseModalWrapper
beforeEach(() => {
wrapper = shallowMount(RemoveModal, {})
baseModalWrapper = wrapper.find(BaseModal)
})
describe('Emitted show event from base-modal', () => {
beforeEach(() => {
wrapper.setMethods({
show: jest.fn()
})
const post = {}
baseModalWrapper.vm.$emit('show', post)
})
test('Called show()', () => {
expect(wrapper.vm.show).toHaveBeenCalled()
})
test('Shown the post', () => {
wrapper.vm.post = {}
expect(wrapper.find('post-stub').exists()).toBe(true)
})
})
describe('Emitted hidden event from base-modal', () => {
beforeEach(() => {
wrapper.setMethods({
hidden: jest.fn()
})
baseModalWrapper.vm.$emit('hidden')
})
test('Called hidden()', () => {
expect(wrapper.vm.hidden).toHaveBeenCalled()
})
test('The post is hidden', () => {
expect(wrapper.find('.list-group').exists()).toBe(false)
})
})
})
| Use shallowMount instead of mount | Use shallowMount instead of mount
| JavaScript | mit | sunya9/beta,sunya9/beta,sunya9/beta | javascript | ## Code Before:
import RemoveModal from '~/components/RemoveModal'
import BaseModal from '~/components/BaseModal'
import { mount } from 'helper'
describe('Remove modal component', () => {
let wrapper, baseModalWrapper
beforeEach(() => {
wrapper = mount(RemoveModal, {
stubs: {
post: true
}
})
baseModalWrapper = wrapper.find(BaseModal)
})
describe('Emitted show event from base-modal', () => {
beforeEach(() => {
wrapper.setMethods({
show: jest.fn()
})
const post = {}
baseModalWrapper.vm.$emit('show', post)
})
test('Called show()', () => {
expect(wrapper.vm.show).toHaveBeenCalled()
})
test('Shown the post', () => {
wrapper.vm.post = {}
expect(wrapper.find('post-stub').exists()).toBe(true)
})
})
describe('Emitted hidden event from base-modal', () => {
beforeEach(() => {
wrapper.setMethods({
hidden: jest.fn()
})
baseModalWrapper.vm.$emit('hidden')
})
test('Called hidden()', () => {
expect(wrapper.vm.hidden).toHaveBeenCalled()
})
test('The post is hidden', () => {
expect(wrapper.find('.list-group').exists()).toBe(false)
})
})
})
## Instruction:
Use shallowMount instead of mount
## Code After:
import RemoveModal from '~/components/RemoveModal'
import BaseModal from '~/components/BaseModal'
import { shallowMount } from 'helper'
describe('Remove modal component', () => {
let wrapper, baseModalWrapper
beforeEach(() => {
wrapper = shallowMount(RemoveModal, {})
baseModalWrapper = wrapper.find(BaseModal)
})
describe('Emitted show event from base-modal', () => {
beforeEach(() => {
wrapper.setMethods({
show: jest.fn()
})
const post = {}
baseModalWrapper.vm.$emit('show', post)
})
test('Called show()', () => {
expect(wrapper.vm.show).toHaveBeenCalled()
})
test('Shown the post', () => {
wrapper.vm.post = {}
expect(wrapper.find('post-stub').exists()).toBe(true)
})
})
describe('Emitted hidden event from base-modal', () => {
beforeEach(() => {
wrapper.setMethods({
hidden: jest.fn()
})
baseModalWrapper.vm.$emit('hidden')
})
test('Called hidden()', () => {
expect(wrapper.vm.hidden).toHaveBeenCalled()
})
test('The post is hidden', () => {
expect(wrapper.find('.list-group').exists()).toBe(false)
})
})
})
| import RemoveModal from '~/components/RemoveModal'
import BaseModal from '~/components/BaseModal'
- import { mount } from 'helper'
? ^
+ import { shallowMount } from 'helper'
? ^^^^^^^^
describe('Remove modal component', () => {
let wrapper, baseModalWrapper
beforeEach(() => {
- wrapper = mount(RemoveModal, {
? ^
+ wrapper = shallowMount(RemoveModal, {})
? ^^^^^^^^ ++
- stubs: {
- post: true
- }
- })
baseModalWrapper = wrapper.find(BaseModal)
})
describe('Emitted show event from base-modal', () => {
beforeEach(() => {
wrapper.setMethods({
show: jest.fn()
})
const post = {}
baseModalWrapper.vm.$emit('show', post)
})
test('Called show()', () => {
expect(wrapper.vm.show).toHaveBeenCalled()
})
test('Shown the post', () => {
wrapper.vm.post = {}
expect(wrapper.find('post-stub').exists()).toBe(true)
})
})
describe('Emitted hidden event from base-modal', () => {
beforeEach(() => {
wrapper.setMethods({
hidden: jest.fn()
})
baseModalWrapper.vm.$emit('hidden')
})
test('Called hidden()', () => {
expect(wrapper.vm.hidden).toHaveBeenCalled()
})
test('The post is hidden', () => {
expect(wrapper.find('.list-group').exists()).toBe(false)
})
})
}) | 8 | 0.170213 | 2 | 6 |
3a4dfcce5de01f001ba29db5879752ec8e5acf87 | meta-oe/recipes-support/cpprest/cpprest_2.10.13.bb | meta-oe/recipes-support/cpprest/cpprest_2.10.13.bb | SUMMARY = "Microsoft project for cloud-based client-server communication in native code using a modern asynchronous C++ API design."
SECTION = "libs/network"
HOMEPAGE = "https://github.com/Microsoft/cpprestsdk/"
LICENSE = "MIT"
LIC_FILES_CHKSUM = "file://${S}/license.txt;md5=a2e15b954769218ff912468eecd6a02f"
DEPENDS = "openssl websocketpp zlib boost brotli"
EXTRA_OECMAKE = "-DCPPREST_EXPORT_DIR=cmake -DCPPREST_EXCLUDE_BROTLI=OFF"
SRC_URI = "git://github.com/Microsoft/cpprestsdk.git;protocol=https;branch=master \
file://disable-float-tests.patch \
file://disable-outside-tests.patch "
# tag 2.10.13
SRCREV= "9d8f544001cb74544de6dc8c565592f7e2626d6e"
S = "${WORKDIR}/git"
inherit cmake pkgconfig
# Temporary until https://github.com/Microsoft/cpprestsdk/issues/1099
# is fixed properly
CXXFLAGS += "-Wno-error=deprecated-copy -Wno-error=redundant-move"
| SUMMARY = "Microsoft project for cloud-based client-server communication in native code using a modern asynchronous C++ API design."
SECTION = "libs/network"
HOMEPAGE = "https://github.com/Microsoft/cpprestsdk/"
LICENSE = "MIT"
LIC_FILES_CHKSUM = "file://${S}/license.txt;md5=a2e15b954769218ff912468eecd6a02f"
DEPENDS = "openssl websocketpp zlib boost brotli"
EXTRA_OECMAKE = "-DCPPREST_EXPORT_DIR=cmake -DCPPREST_EXCLUDE_BROTLI=OFF"
SRC_URI = "git://github.com/Microsoft/cpprestsdk.git;protocol=https;branch=master \
file://disable-float-tests.patch \
file://disable-outside-tests.patch "
# tag 2.10.13
SRCREV= "9d8f544001cb74544de6dc8c565592f7e2626d6e"
S = "${WORKDIR}/git"
inherit cmake pkgconfig
CXXFLAGS += "-Wno-error"
| Fix build failure with gcc 8 | cpprest: Fix build failure with gcc 8
The workaround for gcc 9 broke building with gcc 8:
cc1plus: error: -Werror=deprecated-copy: no option -Wdeprecated-copy
cc1plus: error: -Werror=redundant-move: no option -Wredundant-move
ninja: build stopped: subcommand failed.
-Werror is often problematic in distribution builds,
disable it globally and permanently.
Signed-off-by: Adrian Bunk <0b86548ef377da0031a3ff3f0c4e06f016e20105@stusta.de>
Signed-off-by: Khem Raj <729d64b6f67515e258459a5f6d20ec88b2caf8df@gmail.com>
| BitBake | mit | openembedded/meta-openembedded,rehsack/meta-openembedded,moto-timo/meta-openembedded,mrchapp/meta-openembedded,VCTLabs/meta-openembedded,rehsack/meta-openembedded,rehsack/meta-openembedded,mrchapp/meta-openembedded,lgirdk/meta-openembedded,openembedded/meta-openembedded,openembedded/meta-openembedded,openembedded/meta-openembedded,schnitzeltony/meta-openembedded,VCTLabs/meta-openembedded,victronenergy/meta-openembedded,mrchapp/meta-openembedded,victronenergy/meta-openembedded,rehsack/meta-openembedded,lgirdk/meta-openembedded,VCTLabs/meta-openembedded,rehsack/meta-openembedded,VCTLabs/meta-openembedded,rehsack/meta-openembedded,lgirdk/meta-openembedded,schnitzeltony/meta-openembedded,victronenergy/meta-openembedded,VCTLabs/meta-openembedded,mrchapp/meta-openembedded,openembedded/meta-openembedded,mrchapp/meta-openembedded,moto-timo/meta-openembedded,lgirdk/meta-openembedded,victronenergy/meta-openembedded,VCTLabs/meta-openembedded,moto-timo/meta-openembedded,rehsack/meta-openembedded,schnitzeltony/meta-openembedded,VCTLabs/meta-openembedded,moto-timo/meta-openembedded,openembedded/meta-openembedded,schnitzeltony/meta-openembedded,openembedded/meta-openembedded,victronenergy/meta-openembedded,openembedded/meta-openembedded,lgirdk/meta-openembedded,victronenergy/meta-openembedded,VCTLabs/meta-openembedded,schnitzeltony/meta-openembedded,mrchapp/meta-openembedded,schnitzeltony/meta-openembedded,moto-timo/meta-openembedded,lgirdk/meta-openembedded,lgirdk/meta-openembedded,schnitzeltony/meta-openembedded,mrchapp/meta-openembedded,victronenergy/meta-openembedded | bitbake | ## Code Before:
SUMMARY = "Microsoft project for cloud-based client-server communication in native code using a modern asynchronous C++ API design."
SECTION = "libs/network"
HOMEPAGE = "https://github.com/Microsoft/cpprestsdk/"
LICENSE = "MIT"
LIC_FILES_CHKSUM = "file://${S}/license.txt;md5=a2e15b954769218ff912468eecd6a02f"
DEPENDS = "openssl websocketpp zlib boost brotli"
EXTRA_OECMAKE = "-DCPPREST_EXPORT_DIR=cmake -DCPPREST_EXCLUDE_BROTLI=OFF"
SRC_URI = "git://github.com/Microsoft/cpprestsdk.git;protocol=https;branch=master \
file://disable-float-tests.patch \
file://disable-outside-tests.patch "
# tag 2.10.13
SRCREV= "9d8f544001cb74544de6dc8c565592f7e2626d6e"
S = "${WORKDIR}/git"
inherit cmake pkgconfig
# Temporary until https://github.com/Microsoft/cpprestsdk/issues/1099
# is fixed properly
CXXFLAGS += "-Wno-error=deprecated-copy -Wno-error=redundant-move"
## Instruction:
cpprest: Fix build failure with gcc 8
The workaround for gcc 9 broke building with gcc 8:
cc1plus: error: -Werror=deprecated-copy: no option -Wdeprecated-copy
cc1plus: error: -Werror=redundant-move: no option -Wredundant-move
ninja: build stopped: subcommand failed.
-Werror is often problematic in distribution builds,
disable it globally and permanently.
Signed-off-by: Adrian Bunk <0b86548ef377da0031a3ff3f0c4e06f016e20105@stusta.de>
Signed-off-by: Khem Raj <729d64b6f67515e258459a5f6d20ec88b2caf8df@gmail.com>
## Code After:
SUMMARY = "Microsoft project for cloud-based client-server communication in native code using a modern asynchronous C++ API design."
SECTION = "libs/network"
HOMEPAGE = "https://github.com/Microsoft/cpprestsdk/"
LICENSE = "MIT"
LIC_FILES_CHKSUM = "file://${S}/license.txt;md5=a2e15b954769218ff912468eecd6a02f"
DEPENDS = "openssl websocketpp zlib boost brotli"
EXTRA_OECMAKE = "-DCPPREST_EXPORT_DIR=cmake -DCPPREST_EXCLUDE_BROTLI=OFF"
SRC_URI = "git://github.com/Microsoft/cpprestsdk.git;protocol=https;branch=master \
file://disable-float-tests.patch \
file://disable-outside-tests.patch "
# tag 2.10.13
SRCREV= "9d8f544001cb74544de6dc8c565592f7e2626d6e"
S = "${WORKDIR}/git"
inherit cmake pkgconfig
CXXFLAGS += "-Wno-error"
| SUMMARY = "Microsoft project for cloud-based client-server communication in native code using a modern asynchronous C++ API design."
SECTION = "libs/network"
HOMEPAGE = "https://github.com/Microsoft/cpprestsdk/"
LICENSE = "MIT"
LIC_FILES_CHKSUM = "file://${S}/license.txt;md5=a2e15b954769218ff912468eecd6a02f"
DEPENDS = "openssl websocketpp zlib boost brotli"
EXTRA_OECMAKE = "-DCPPREST_EXPORT_DIR=cmake -DCPPREST_EXCLUDE_BROTLI=OFF"
SRC_URI = "git://github.com/Microsoft/cpprestsdk.git;protocol=https;branch=master \
file://disable-float-tests.patch \
file://disable-outside-tests.patch "
# tag 2.10.13
SRCREV= "9d8f544001cb74544de6dc8c565592f7e2626d6e"
S = "${WORKDIR}/git"
inherit cmake pkgconfig
+ CXXFLAGS += "-Wno-error"
- # Temporary until https://github.com/Microsoft/cpprestsdk/issues/1099
- # is fixed properly
- CXXFLAGS += "-Wno-error=deprecated-copy -Wno-error=redundant-move" | 4 | 0.173913 | 1 | 3 |
c61ba454d55b6891f800416178304da2a19e4785 | .travis.yml | .travis.yml | language: python
python:
- "2.6"
- "2.7"
- "3.4"
matrix:
allow_failures:
- python: "2.6"
- python: "3.4"
env:
- TEST_TYPE=unit
- TEST_TYPE=functional
install:
- pip install -r requirements/use.txt
- pip install -r requirements/test.txt
script:
- make $TEST_TYPE
| sudo: false
language: python
python:
- "2.6"
- "2.7"
- "3.4"
matrix:
allow_failures:
- python: "2.6"
- python: "3.4"
env:
- TEST_TYPE=unit
- TEST_TYPE=functional
install:
- pip install -r requirements/use.txt
- pip install -r requirements/test.txt
script:
- make $TEST_TYPE
| Upgrade to TravisCI's new infrastructure | Upgrade to TravisCI's new infrastructure
| YAML | mit | BenjamenMeyer/HTTPretty,BenjamenMeyer/HTTPretty,BenjamenMeyer/HTTPretty | yaml | ## Code Before:
language: python
python:
- "2.6"
- "2.7"
- "3.4"
matrix:
allow_failures:
- python: "2.6"
- python: "3.4"
env:
- TEST_TYPE=unit
- TEST_TYPE=functional
install:
- pip install -r requirements/use.txt
- pip install -r requirements/test.txt
script:
- make $TEST_TYPE
## Instruction:
Upgrade to TravisCI's new infrastructure
## Code After:
sudo: false
language: python
python:
- "2.6"
- "2.7"
- "3.4"
matrix:
allow_failures:
- python: "2.6"
- python: "3.4"
env:
- TEST_TYPE=unit
- TEST_TYPE=functional
install:
- pip install -r requirements/use.txt
- pip install -r requirements/test.txt
script:
- make $TEST_TYPE
| + sudo: false
language: python
python:
- "2.6"
- "2.7"
- "3.4"
matrix:
allow_failures:
- python: "2.6"
- python: "3.4"
env:
- TEST_TYPE=unit
- TEST_TYPE=functional
install:
- pip install -r requirements/use.txt
- pip install -r requirements/test.txt
script:
- make $TEST_TYPE | 1 | 0.047619 | 1 | 0 |
86d53091d2ddd9245e9a3b9d2fa69edf8563bb02 | scripts/update.sh | scripts/update.sh |
cd $(dirname "$0")
# Retrieve bulk data
curl -s -o /tmp/data.zip http://scc.virginia.gov/clk/data/CISbemon.CSV.zip
# Uncompress the ZIP file
unzip -d ../data/ /tmp/data.zip
# Rename files to lowercase
rename 'y/A-Z/a-z/' ../data/*
# Delete temporary artifacts
rm /tmp/data.zip
cd ../data/
sqlite3 vabusinesses.sqlite < ../scripts/load-data.sql
|
cd $(dirname "$0")
# Retrieve bulk data
curl -s -o /tmp/data.zip http://scc.virginia.gov/clk/data/CISbemon.CSV.zip
# Uncompress the ZIP file
unzip -d ../data/ /tmp/data.zip
# Rename files to lowercase
rename 'y/A-Z/a-z/' ../data/*
# Eliminate the periods from a pair of filename
mv name.history.csv name_history.csv
mv reserved.name.csv reserved_name.csv
# Delete temporary artifacts
rm /tmp/data.zip
cd ../data/
sqlite3 vabusinesses.sqlite < ../scripts/load-data.sql
| Rename problematic data file names | Rename problematic data file names
| Shell | mit | openva/vabusinesses.org,openva/vabusinesses.org | shell | ## Code Before:
cd $(dirname "$0")
# Retrieve bulk data
curl -s -o /tmp/data.zip http://scc.virginia.gov/clk/data/CISbemon.CSV.zip
# Uncompress the ZIP file
unzip -d ../data/ /tmp/data.zip
# Rename files to lowercase
rename 'y/A-Z/a-z/' ../data/*
# Delete temporary artifacts
rm /tmp/data.zip
cd ../data/
sqlite3 vabusinesses.sqlite < ../scripts/load-data.sql
## Instruction:
Rename problematic data file names
## Code After:
cd $(dirname "$0")
# Retrieve bulk data
curl -s -o /tmp/data.zip http://scc.virginia.gov/clk/data/CISbemon.CSV.zip
# Uncompress the ZIP file
unzip -d ../data/ /tmp/data.zip
# Rename files to lowercase
rename 'y/A-Z/a-z/' ../data/*
# Eliminate the periods from a pair of filename
mv name.history.csv name_history.csv
mv reserved.name.csv reserved_name.csv
# Delete temporary artifacts
rm /tmp/data.zip
cd ../data/
sqlite3 vabusinesses.sqlite < ../scripts/load-data.sql
|
cd $(dirname "$0")
# Retrieve bulk data
curl -s -o /tmp/data.zip http://scc.virginia.gov/clk/data/CISbemon.CSV.zip
# Uncompress the ZIP file
unzip -d ../data/ /tmp/data.zip
# Rename files to lowercase
rename 'y/A-Z/a-z/' ../data/*
+ # Eliminate the periods from a pair of filename
+ mv name.history.csv name_history.csv
+ mv reserved.name.csv reserved_name.csv
+
# Delete temporary artifacts
rm /tmp/data.zip
cd ../data/
sqlite3 vabusinesses.sqlite < ../scripts/load-data.sql | 4 | 0.235294 | 4 | 0 |
1a227227b377bb3515a3c5e4b4135d0075b2fe2a | docker-compose.yml | docker-compose.yml | version: '2.1'
volumes:
mysql:
driver: local
rvf-store:
driver: local
services:
db:
image: mysql:5.7
restart: always
environment:
- MYSQL_ROOT_PASSWORD=snomed
- MYSQL_USERNAME=rvf_user
- MYSQL_PASSWORD=snomedrvf
- MYSQL_DATABASE=rvf_master
networks:
rvf_net:
aliases:
- sql
volumes:
- mysql:/var/lib/mysql
command:
mysqld --sql_mode="NO_ENGINE_SUBSTITUTION,STRICT_TRANS_TABLES"
rvf:
image: snomedinternational/release-validation-framework:latest
container_name: rvf
depends_on:
- db
environment:
- rvf.assertion.import.required=true
entrypoint: java -Xms512m -Xmx4g -DrvfConfigLocation=/app/config -jar api.jar --server.port=8081 --server.servlet.context-path=/api
ports:
- 8081:8081
networks:
- rvf_net
volumes:
- rvf-store:/app/store
restart: always
networks:
rvf_net:
| version: '2.1'
volumes:
mysql:
driver: local
rvf-store:
driver: local
services:
db:
image: mysql:5.7
restart: always
environment:
- MYSQL_ROOT_PASSWORD=snomed
- MYSQL_USERNAME=rvf_user
- MYSQL_PASSWORD=snomedrvf
- MYSQL_DATABASE=rvf_master
networks:
rvf_net:
aliases:
- sql
volumes:
- mysql:/var/lib/mysql
command:
mysqld --sql_mode="NO_ENGINE_SUBSTITUTION,STRICT_TRANS_TABLES" --lower_case_table_names=1
rvf:
image: snomedinternational/release-validation-framework:latest
container_name: rvf
depends_on:
- db
environment:
- rvf.assertion.import.required=true
entrypoint: java -Xms512m -Xmx4g -DrvfConfigLocation=/app/config -jar api.jar --server.port=8081 --server.servlet.context-path=/api
ports:
- 8081:8081
networks:
- rvf_net
volumes:
- rvf-store:/app/store
restart: always
networks:
rvf_net:
| Fix failing test issues because of mixed case table names | Fix failing test issues because of mixed case table names
| YAML | apache-2.0 | IHTSDO/release-validation-framework,IHTSDO/release-validation-framework,IHTSDO/release-validation-framework | yaml | ## Code Before:
version: '2.1'
volumes:
mysql:
driver: local
rvf-store:
driver: local
services:
db:
image: mysql:5.7
restart: always
environment:
- MYSQL_ROOT_PASSWORD=snomed
- MYSQL_USERNAME=rvf_user
- MYSQL_PASSWORD=snomedrvf
- MYSQL_DATABASE=rvf_master
networks:
rvf_net:
aliases:
- sql
volumes:
- mysql:/var/lib/mysql
command:
mysqld --sql_mode="NO_ENGINE_SUBSTITUTION,STRICT_TRANS_TABLES"
rvf:
image: snomedinternational/release-validation-framework:latest
container_name: rvf
depends_on:
- db
environment:
- rvf.assertion.import.required=true
entrypoint: java -Xms512m -Xmx4g -DrvfConfigLocation=/app/config -jar api.jar --server.port=8081 --server.servlet.context-path=/api
ports:
- 8081:8081
networks:
- rvf_net
volumes:
- rvf-store:/app/store
restart: always
networks:
rvf_net:
## Instruction:
Fix failing test issues because of mixed case table names
## Code After:
version: '2.1'
volumes:
mysql:
driver: local
rvf-store:
driver: local
services:
db:
image: mysql:5.7
restart: always
environment:
- MYSQL_ROOT_PASSWORD=snomed
- MYSQL_USERNAME=rvf_user
- MYSQL_PASSWORD=snomedrvf
- MYSQL_DATABASE=rvf_master
networks:
rvf_net:
aliases:
- sql
volumes:
- mysql:/var/lib/mysql
command:
mysqld --sql_mode="NO_ENGINE_SUBSTITUTION,STRICT_TRANS_TABLES" --lower_case_table_names=1
rvf:
image: snomedinternational/release-validation-framework:latest
container_name: rvf
depends_on:
- db
environment:
- rvf.assertion.import.required=true
entrypoint: java -Xms512m -Xmx4g -DrvfConfigLocation=/app/config -jar api.jar --server.port=8081 --server.servlet.context-path=/api
ports:
- 8081:8081
networks:
- rvf_net
volumes:
- rvf-store:/app/store
restart: always
networks:
rvf_net:
| version: '2.1'
volumes:
mysql:
driver: local
rvf-store:
driver: local
services:
db:
image: mysql:5.7
restart: always
environment:
- MYSQL_ROOT_PASSWORD=snomed
- MYSQL_USERNAME=rvf_user
- MYSQL_PASSWORD=snomedrvf
- MYSQL_DATABASE=rvf_master
networks:
rvf_net:
aliases:
- sql
volumes:
- mysql:/var/lib/mysql
command:
- mysqld --sql_mode="NO_ENGINE_SUBSTITUTION,STRICT_TRANS_TABLES"
+ mysqld --sql_mode="NO_ENGINE_SUBSTITUTION,STRICT_TRANS_TABLES" --lower_case_table_names=1
? +++++++++++++++++++++++++++
rvf:
image: snomedinternational/release-validation-framework:latest
container_name: rvf
depends_on:
- db
environment:
- rvf.assertion.import.required=true
entrypoint: java -Xms512m -Xmx4g -DrvfConfigLocation=/app/config -jar api.jar --server.port=8081 --server.servlet.context-path=/api
ports:
- 8081:8081
networks:
- rvf_net
volumes:
- rvf-store:/app/store
restart: always
networks:
rvf_net: | 2 | 0.046512 | 1 | 1 |
b619e26647903a39b6eb1ce59dca2cb9e8966228 | flask_boilerplate/templates/uwsgi.ini | flask_boilerplate/templates/uwsgi.ini | [uwsgi]
if-env = PORT
http-socket = :$(PORT)
endif =
if-not-env = PORT
http-socket = :5000
endif =
if-env = VIRTUAL_ENV
home = $(VIRTUAL_ENV)
endif =
master = true
processes = 4
die-on-term = true
module = run:_app
memory-report = true
| [uwsgi]
if-env = PORT
http-socket = :$(PORT)
endif =
if-not-env = PORT
http-socket = :5000
endif =
if-env = VIRTUAL_ENV
home = $(VIRTUAL_ENV)
endif =
harakiri = 20
max-requests = 5000
vacuum = true
master = true
processes = 4
die-on-term = true
module = run:_app
memory-report = true
| Add settings related to request limitation. | Add settings related to request limitation.
| INI | mit | FGtatsuro/flask-boilerplate,FGtatsuro/flask-boilerplate,FGtatsuro/flask-boilerplate | ini | ## Code Before:
[uwsgi]
if-env = PORT
http-socket = :$(PORT)
endif =
if-not-env = PORT
http-socket = :5000
endif =
if-env = VIRTUAL_ENV
home = $(VIRTUAL_ENV)
endif =
master = true
processes = 4
die-on-term = true
module = run:_app
memory-report = true
## Instruction:
Add settings related to request limitation.
## Code After:
[uwsgi]
if-env = PORT
http-socket = :$(PORT)
endif =
if-not-env = PORT
http-socket = :5000
endif =
if-env = VIRTUAL_ENV
home = $(VIRTUAL_ENV)
endif =
harakiri = 20
max-requests = 5000
vacuum = true
master = true
processes = 4
die-on-term = true
module = run:_app
memory-report = true
| [uwsgi]
if-env = PORT
http-socket = :$(PORT)
endif =
if-not-env = PORT
http-socket = :5000
endif =
if-env = VIRTUAL_ENV
home = $(VIRTUAL_ENV)
endif =
+ harakiri = 20
+ max-requests = 5000
+ vacuum = true
+
master = true
processes = 4
die-on-term = true
module = run:_app
memory-report = true | 4 | 0.235294 | 4 | 0 |
f8539cecede8b8688172c4587231c431b0d4a142 | requirements-test.txt | requirements-test.txt | pytest==5.3.5
pytest-mock==1.11.2
pytest-asyncio==0.10.0
coverage==4.5.4
mock==3.0.5
docker==4.0.1
# Code style
flake8==3.7.9
# black
isort==4.3.21
pre-commit==1.20.0
| pytest==5.3.5
pytest-mock
pytest-asyncio
coverage==4.5.4
mock==3.0.5
docker==4.0.1
# Code style
flake8==3.7.9
# black
isort==4.3.21
pre-commit==1.20.0
| Update pytest-mock from 1.11.2 to 2.0.0 | Update pytest-mock from 1.11.2 to 2.0.0 | Text | mit | xeroc/python-bitshares | text | ## Code Before:
pytest==5.3.5
pytest-mock==1.11.2
pytest-asyncio==0.10.0
coverage==4.5.4
mock==3.0.5
docker==4.0.1
# Code style
flake8==3.7.9
# black
isort==4.3.21
pre-commit==1.20.0
## Instruction:
Update pytest-mock from 1.11.2 to 2.0.0
## Code After:
pytest==5.3.5
pytest-mock
pytest-asyncio
coverage==4.5.4
mock==3.0.5
docker==4.0.1
# Code style
flake8==3.7.9
# black
isort==4.3.21
pre-commit==1.20.0
| pytest==5.3.5
- pytest-mock==1.11.2
+ pytest-mock
- pytest-asyncio==0.10.0
? --------
+ pytest-asyncio
coverage==4.5.4
mock==3.0.5
docker==4.0.1
# Code style
flake8==3.7.9
# black
isort==4.3.21
pre-commit==1.20.0 | 4 | 0.333333 | 2 | 2 |
fc00f88b2c51f0af6e12e46434037102ef7cb61b | main.go | main.go | package main
import (
"flag"
"log"
"os"
)
var (
PlaybookFile = flag.String("play", "site.yml", "Path to the playbook to execute")
InventoryFile = flag.String("i", "hosts", "Path to the inventory file")
LimitHosts = flag.String("l", "", "Limit hosts")
)
func main() {
flag.Parse()
log.SetFlags(0)
log.SetOutput(os.Stdout)
inv, err := LoadInventoryFile(*InventoryFile)
if err != nil {
log.Fatalf("error loading inventory file %q reason=%s", *InventoryFile, err.Error())
}
if ngroups := len(inv); ngroups == 1 {
log.Println("Loaded 1 group from inventory")
} else {
log.Printf("Loaded %d groups from inventory", ngroups)
}
// Run a sanity check on the inventory groups.
for _, g := range inv {
if err = g.Check(); err != nil {
log.Fatalf("Error in group %q: %s", g.Name, err.Error())
}
}
return
}
| package main
import (
"flag"
"log"
"os"
)
var (
PlaybookFile = flag.String("play", "site.yml", "Path to the playbook to execute")
InventoryFile = flag.String("i", "hosts", "Path to the inventory file")
LimitHosts = flag.String("l", "", "Limit hosts")
)
func main() {
flag.Parse()
log.SetFlags(0)
log.SetOutput(os.Stdout)
inv, err := LoadInventoryFile(*InventoryFile)
if err != nil {
log.Fatalf("error loading inventory file %q reason=%s", *InventoryFile, err.Error())
}
if ngroups := len(inv); ngroups == 1 {
log.Println("Loaded 1 group from inventory")
} else {
log.Printf("Loaded %d groups from inventory", ngroups)
}
// Run a sanity check on the inventory groups.
for _, g := range inv {
if err = g.Check(); err != nil {
log.Fatalf("Error in group %q: %s", g.Name, err.Error())
}
}
// Load the playbook.
plays, err := LoadPlaybook(*PlaybookFile)
if err != nil {
log.Fatalf("Error loading playbook %q: %s", *PlaybookFile, err.Error())
}
if nplays := len(plays); nplays == 1 {
log.Println("Loaded 1 play")
} else {
log.Printf("Loaded %d plays", len(plays))
}
// Check the plays.
for _, p := range plays {
if err := p.Check(); err != nil {
log.Fatalf("Error in play %q: %s", p.Name, err.Error())
}
}
return
}
| Load and check the plays in the playbook. | Load and check the plays in the playbook.
| Go | apache-2.0 | nesv/govern | go | ## Code Before:
package main
import (
"flag"
"log"
"os"
)
var (
PlaybookFile = flag.String("play", "site.yml", "Path to the playbook to execute")
InventoryFile = flag.String("i", "hosts", "Path to the inventory file")
LimitHosts = flag.String("l", "", "Limit hosts")
)
func main() {
flag.Parse()
log.SetFlags(0)
log.SetOutput(os.Stdout)
inv, err := LoadInventoryFile(*InventoryFile)
if err != nil {
log.Fatalf("error loading inventory file %q reason=%s", *InventoryFile, err.Error())
}
if ngroups := len(inv); ngroups == 1 {
log.Println("Loaded 1 group from inventory")
} else {
log.Printf("Loaded %d groups from inventory", ngroups)
}
// Run a sanity check on the inventory groups.
for _, g := range inv {
if err = g.Check(); err != nil {
log.Fatalf("Error in group %q: %s", g.Name, err.Error())
}
}
return
}
## Instruction:
Load and check the plays in the playbook.
## Code After:
package main
import (
"flag"
"log"
"os"
)
var (
PlaybookFile = flag.String("play", "site.yml", "Path to the playbook to execute")
InventoryFile = flag.String("i", "hosts", "Path to the inventory file")
LimitHosts = flag.String("l", "", "Limit hosts")
)
func main() {
flag.Parse()
log.SetFlags(0)
log.SetOutput(os.Stdout)
inv, err := LoadInventoryFile(*InventoryFile)
if err != nil {
log.Fatalf("error loading inventory file %q reason=%s", *InventoryFile, err.Error())
}
if ngroups := len(inv); ngroups == 1 {
log.Println("Loaded 1 group from inventory")
} else {
log.Printf("Loaded %d groups from inventory", ngroups)
}
// Run a sanity check on the inventory groups.
for _, g := range inv {
if err = g.Check(); err != nil {
log.Fatalf("Error in group %q: %s", g.Name, err.Error())
}
}
// Load the playbook.
plays, err := LoadPlaybook(*PlaybookFile)
if err != nil {
log.Fatalf("Error loading playbook %q: %s", *PlaybookFile, err.Error())
}
if nplays := len(plays); nplays == 1 {
log.Println("Loaded 1 play")
} else {
log.Printf("Loaded %d plays", len(plays))
}
// Check the plays.
for _, p := range plays {
if err := p.Check(); err != nil {
log.Fatalf("Error in play %q: %s", p.Name, err.Error())
}
}
return
}
| package main
import (
"flag"
"log"
"os"
)
var (
PlaybookFile = flag.String("play", "site.yml", "Path to the playbook to execute")
InventoryFile = flag.String("i", "hosts", "Path to the inventory file")
LimitHosts = flag.String("l", "", "Limit hosts")
)
func main() {
flag.Parse()
log.SetFlags(0)
log.SetOutput(os.Stdout)
inv, err := LoadInventoryFile(*InventoryFile)
if err != nil {
log.Fatalf("error loading inventory file %q reason=%s", *InventoryFile, err.Error())
}
if ngroups := len(inv); ngroups == 1 {
log.Println("Loaded 1 group from inventory")
} else {
log.Printf("Loaded %d groups from inventory", ngroups)
}
// Run a sanity check on the inventory groups.
for _, g := range inv {
if err = g.Check(); err != nil {
log.Fatalf("Error in group %q: %s", g.Name, err.Error())
}
}
+ // Load the playbook.
+ plays, err := LoadPlaybook(*PlaybookFile)
+ if err != nil {
+ log.Fatalf("Error loading playbook %q: %s", *PlaybookFile, err.Error())
+ }
+
+ if nplays := len(plays); nplays == 1 {
+ log.Println("Loaded 1 play")
+ } else {
+ log.Printf("Loaded %d plays", len(plays))
+ }
+
+ // Check the plays.
+ for _, p := range plays {
+ if err := p.Check(); err != nil {
+ log.Fatalf("Error in play %q: %s", p.Name, err.Error())
+ }
+ }
+
return
} | 19 | 0.487179 | 19 | 0 |
504d24b3cd2687fd9d0ef3a3b8a7bcd4c792b007 | tests/Unit/ConfigLtirmRtrimTest.php | tests/Unit/ConfigLtirmRtrimTest.php | <?php namespace Tests\Unit;
use Performance\Performance;
use Performance\Config;
class ConfigLtirmRtrimTest extends \PHPUnit_Framework_TestCase
{
protected function setUp()
{
Config::reset();
}
public function testStaticFunctionPoint()
{
// You can specify the characters you want to strip
Config::set(Config::POINT_LABEL_LTRIM, 'synchronize');
Config::set(Config::POINT_LABEL_RTRIM, 'Run');
$this->synchronizeTaskARun();
$this->synchronizeTaskBRun();
$this->synchronizeTaskCRun();
// Finish all tasks and show test results
Performance::results();
}
// Create task
public function synchronizeTaskARun()
{
// Set point Task A
Performance::point(__FUNCTION__);
//
// Run code
sleep(1);
//
// Finish point Task C
Performance::finish();
}
public function synchronizeTaskBRun()
{
// Set point Task B
Performance::point(__FUNCTION__);
//
// Run code
sleep(1);
//
// Finish point Task B
Performance::finish();
}
public function synchronizeTaskCRun()
{
// Set point Task C
Performance::point(__FUNCTION__);
//
// Run code
//
// Finish point Task C
Performance::finish();
}
} | <?php namespace Tests\Unit;
use Performance\Performance;
use Performance\Config;
class ConfigLtirmRtrimTest extends \PHPUnit_Framework_TestCase
{
protected function setUp()
{
Config::reset();
}
public function testStaticFunctionPoint()
{
// You can specify the characters you want to strip
Config::set(Config::POINT_LABEL_LTRIM, 'synchronize');
Config::set(Config::POINT_LABEL_RTRIM, 'Run');
$this->synchronizeTaskARun();
$this->synchronizeTaskBRun();
$this->synchronizeTaskCRun();
// Finish all tasks and show test results
Performance::results();
}
// Create task
public function synchronizeTaskARun()
{
// Set point Task A
Performance::point(__FUNCTION__);
//
// Run code
// sleep(1);
usleep(2000);
//
// Finish point Task C
Performance::finish();
}
public function synchronizeTaskBRun()
{
// Set point Task B
Performance::point(__FUNCTION__);
//
// Run code
usleep(2000);
//
// Finish point Task B
Performance::finish();
}
public function synchronizeTaskCRun()
{
// Set point Task C
Performance::point(__FUNCTION__);
//
// Run code
usleep(2000);
//
// Finish point Task C
Performance::finish();
}
} | Change 1s to 2 ms | Change 1s to 2 ms
| PHP | apache-2.0 | bvanhoekelen/performance,bvanhoekelen/performance | php | ## Code Before:
<?php namespace Tests\Unit;
use Performance\Performance;
use Performance\Config;
class ConfigLtirmRtrimTest extends \PHPUnit_Framework_TestCase
{
protected function setUp()
{
Config::reset();
}
public function testStaticFunctionPoint()
{
// You can specify the characters you want to strip
Config::set(Config::POINT_LABEL_LTRIM, 'synchronize');
Config::set(Config::POINT_LABEL_RTRIM, 'Run');
$this->synchronizeTaskARun();
$this->synchronizeTaskBRun();
$this->synchronizeTaskCRun();
// Finish all tasks and show test results
Performance::results();
}
// Create task
public function synchronizeTaskARun()
{
// Set point Task A
Performance::point(__FUNCTION__);
//
// Run code
sleep(1);
//
// Finish point Task C
Performance::finish();
}
public function synchronizeTaskBRun()
{
// Set point Task B
Performance::point(__FUNCTION__);
//
// Run code
sleep(1);
//
// Finish point Task B
Performance::finish();
}
public function synchronizeTaskCRun()
{
// Set point Task C
Performance::point(__FUNCTION__);
//
// Run code
//
// Finish point Task C
Performance::finish();
}
}
## Instruction:
Change 1s to 2 ms
## Code After:
<?php namespace Tests\Unit;
use Performance\Performance;
use Performance\Config;
class ConfigLtirmRtrimTest extends \PHPUnit_Framework_TestCase
{
protected function setUp()
{
Config::reset();
}
public function testStaticFunctionPoint()
{
// You can specify the characters you want to strip
Config::set(Config::POINT_LABEL_LTRIM, 'synchronize');
Config::set(Config::POINT_LABEL_RTRIM, 'Run');
$this->synchronizeTaskARun();
$this->synchronizeTaskBRun();
$this->synchronizeTaskCRun();
// Finish all tasks and show test results
Performance::results();
}
// Create task
public function synchronizeTaskARun()
{
// Set point Task A
Performance::point(__FUNCTION__);
//
// Run code
// sleep(1);
usleep(2000);
//
// Finish point Task C
Performance::finish();
}
public function synchronizeTaskBRun()
{
// Set point Task B
Performance::point(__FUNCTION__);
//
// Run code
usleep(2000);
//
// Finish point Task B
Performance::finish();
}
public function synchronizeTaskCRun()
{
// Set point Task C
Performance::point(__FUNCTION__);
//
// Run code
usleep(2000);
//
// Finish point Task C
Performance::finish();
}
} | <?php namespace Tests\Unit;
use Performance\Performance;
use Performance\Config;
class ConfigLtirmRtrimTest extends \PHPUnit_Framework_TestCase
{
protected function setUp()
{
Config::reset();
}
public function testStaticFunctionPoint()
{
// You can specify the characters you want to strip
Config::set(Config::POINT_LABEL_LTRIM, 'synchronize');
Config::set(Config::POINT_LABEL_RTRIM, 'Run');
$this->synchronizeTaskARun();
$this->synchronizeTaskBRun();
$this->synchronizeTaskCRun();
// Finish all tasks and show test results
Performance::results();
}
// Create task
public function synchronizeTaskARun()
{
// Set point Task A
Performance::point(__FUNCTION__);
//
// Run code
- sleep(1);
+ // sleep(1);
? ++
+ usleep(2000);
//
// Finish point Task C
Performance::finish();
}
public function synchronizeTaskBRun()
{
// Set point Task B
Performance::point(__FUNCTION__);
//
// Run code
- sleep(1);
? ^
+ usleep(2000);
? + ^^^^
//
// Finish point Task B
Performance::finish();
}
public function synchronizeTaskCRun()
{
// Set point Task C
Performance::point(__FUNCTION__);
//
// Run code
+ usleep(2000);
//
// Finish point Task C
Performance::finish();
}
} | 6 | 0.085714 | 4 | 2 |
3ea9ba57fd732bf9ca07159811b65488f80dc5b7 | features/step_definitions/common_steps.rb | features/step_definitions/common_steps.rb | When(/^I see the home page$/) do
visit root_path
end
When(/^I click "([^"]*)"$/) do |link|
click_on link
end
When(/^I click "([^"]*)" next to a (.*)$/) do |link, thing_to_ignore|
within("table tbody tr td:contains('#{link}')", match: :first) { click_on link }
end
When(/^I enter (.*) as (.*)$/) do |value, field|
fill_in field, :with => value
end
Then(/^I should see a new (.*)$/) do |thing|
expect(page).to have_content(/#{thing.capitalize} was successfully created./)
end
Then(/^I should see a (.*) destroyed$/) do |thing|
expect(page).to have_content(/#{thing.capitalize} was successfully destroyed./)
end
Then(/^I should see all the (.*)$/) do |things|
within('h1') { expect(page).to have_content(/^Listing #{things}$/) }
end
Then(/^I should see "([^"]*)"$/) do |value|
expect(page).to have_content(/#{value}/)
end
| When(/^I see the home page$/) do
visit root_path
end
When(/^I click "([^"]*)"$/) do |link|
click_on link
end
When(/^I click "([^"]*)" next to a (.*)$/) do |link, thing_to_ignore|
within("table tbody tr td:contains('#{link}')", match: :first) { click_on link }
end
When(/^I enter (.*) as (.*)$/) do |value, field|
fill_in field, :with => value
end
Then(/^I should see a new (.*)$/) do |thing|
expect(page).to have_content(/#{thing.capitalize} was successfully created./)
end
Then(/^I should see a (.*) destroyed$/) do |thing|
expect(page).to have_content(/#{thing.capitalize} was successfully destroyed./)
end
Then(/^I should see "([^"]*)"$/) do |value|
expect(page).to have_content(/#{value}/)
end
| Remove old common steps from cuke | Remove old common steps from cuke
| Ruby | mit | hughdavenport/powershop_devtrain_hangman_rails,hughdavenport/powershop_devtrain_hangman_rails,hughdavenport/powershop_devtrain_hangman_rails | ruby | ## Code Before:
When(/^I see the home page$/) do
visit root_path
end
When(/^I click "([^"]*)"$/) do |link|
click_on link
end
When(/^I click "([^"]*)" next to a (.*)$/) do |link, thing_to_ignore|
within("table tbody tr td:contains('#{link}')", match: :first) { click_on link }
end
When(/^I enter (.*) as (.*)$/) do |value, field|
fill_in field, :with => value
end
Then(/^I should see a new (.*)$/) do |thing|
expect(page).to have_content(/#{thing.capitalize} was successfully created./)
end
Then(/^I should see a (.*) destroyed$/) do |thing|
expect(page).to have_content(/#{thing.capitalize} was successfully destroyed./)
end
Then(/^I should see all the (.*)$/) do |things|
within('h1') { expect(page).to have_content(/^Listing #{things}$/) }
end
Then(/^I should see "([^"]*)"$/) do |value|
expect(page).to have_content(/#{value}/)
end
## Instruction:
Remove old common steps from cuke
## Code After:
When(/^I see the home page$/) do
visit root_path
end
When(/^I click "([^"]*)"$/) do |link|
click_on link
end
When(/^I click "([^"]*)" next to a (.*)$/) do |link, thing_to_ignore|
within("table tbody tr td:contains('#{link}')", match: :first) { click_on link }
end
When(/^I enter (.*) as (.*)$/) do |value, field|
fill_in field, :with => value
end
Then(/^I should see a new (.*)$/) do |thing|
expect(page).to have_content(/#{thing.capitalize} was successfully created./)
end
Then(/^I should see a (.*) destroyed$/) do |thing|
expect(page).to have_content(/#{thing.capitalize} was successfully destroyed./)
end
Then(/^I should see "([^"]*)"$/) do |value|
expect(page).to have_content(/#{value}/)
end
| When(/^I see the home page$/) do
visit root_path
end
When(/^I click "([^"]*)"$/) do |link|
click_on link
end
When(/^I click "([^"]*)" next to a (.*)$/) do |link, thing_to_ignore|
within("table tbody tr td:contains('#{link}')", match: :first) { click_on link }
end
When(/^I enter (.*) as (.*)$/) do |value, field|
fill_in field, :with => value
end
Then(/^I should see a new (.*)$/) do |thing|
expect(page).to have_content(/#{thing.capitalize} was successfully created./)
end
Then(/^I should see a (.*) destroyed$/) do |thing|
expect(page).to have_content(/#{thing.capitalize} was successfully destroyed./)
end
- Then(/^I should see all the (.*)$/) do |things|
- within('h1') { expect(page).to have_content(/^Listing #{things}$/) }
- end
-
Then(/^I should see "([^"]*)"$/) do |value|
expect(page).to have_content(/#{value}/)
end | 4 | 0.125 | 0 | 4 |
135c095bad390cfdec05ee9035209245cd5c0fd3 | src/util/object-utilities.js | src/util/object-utilities.js | export var ObjectUtilities = {
isEqual: function (objA, objB) {
var aKeys = Object.keys(objA);
var bKeys = Object.keys(objB);
if (aKeys.length !== bKeys.length) {
return false;
}
for (var i = 0, len = aKeys.length; i < len; i++) {
var key = aKeys[i];
if (!objB.hasOwnProperty(key) || objA[key] !== objB[key]) {
return false;
}
}
return true;
}
};
export var StringUtil = {
pluralize: function (str, count) {
var s = str;
if (count > 1) {
if (str.endsWith("y")) {
s = str.substring(0, str.length - 2) + 'ies';
} else {
s += 's';
}
}
return s;
}
};
| export var ObjectUtilities = {
isEqual: function (objA, objB) {
if( !objA || !objB ) {
return (!objA && !objB);
}
var aKeys = Object.keys(objA);
var bKeys = Object.keys(objB);
if (aKeys.length !== bKeys.length) {
return false;
}
for (var i = 0, len = aKeys.length; i < len; i++) {
var key = aKeys[i];
if (!objB.hasOwnProperty(key) || objA[key] !== objB[key]) {
return false;
}
}
return true;
}
};
export var StringUtil = {
pluralize: function (str, count) {
var s = str;
if (count > 1) {
if (str.endsWith("y")) {
s = str.substring(0, str.length - 2) + 'ies';
} else {
s += 's';
}
}
return s;
}
};
| Fix bug where object comparison is done on null object. | Fix bug where object comparison is done on null object.
| JavaScript | apache-2.0 | kristianmandrup/stamp-web-aurelia,stamp-web/stamp-web-aurelia,stamp-web/stamp-web-aurelia,kristianmandrup/stamp-web-aurelia | javascript | ## Code Before:
export var ObjectUtilities = {
isEqual: function (objA, objB) {
var aKeys = Object.keys(objA);
var bKeys = Object.keys(objB);
if (aKeys.length !== bKeys.length) {
return false;
}
for (var i = 0, len = aKeys.length; i < len; i++) {
var key = aKeys[i];
if (!objB.hasOwnProperty(key) || objA[key] !== objB[key]) {
return false;
}
}
return true;
}
};
export var StringUtil = {
pluralize: function (str, count) {
var s = str;
if (count > 1) {
if (str.endsWith("y")) {
s = str.substring(0, str.length - 2) + 'ies';
} else {
s += 's';
}
}
return s;
}
};
## Instruction:
Fix bug where object comparison is done on null object.
## Code After:
export var ObjectUtilities = {
isEqual: function (objA, objB) {
if( !objA || !objB ) {
return (!objA && !objB);
}
var aKeys = Object.keys(objA);
var bKeys = Object.keys(objB);
if (aKeys.length !== bKeys.length) {
return false;
}
for (var i = 0, len = aKeys.length; i < len; i++) {
var key = aKeys[i];
if (!objB.hasOwnProperty(key) || objA[key] !== objB[key]) {
return false;
}
}
return true;
}
};
export var StringUtil = {
pluralize: function (str, count) {
var s = str;
if (count > 1) {
if (str.endsWith("y")) {
s = str.substring(0, str.length - 2) + 'ies';
} else {
s += 's';
}
}
return s;
}
};
| export var ObjectUtilities = {
isEqual: function (objA, objB) {
+ if( !objA || !objB ) {
+ return (!objA && !objB);
+ }
var aKeys = Object.keys(objA);
var bKeys = Object.keys(objB);
if (aKeys.length !== bKeys.length) {
return false;
}
for (var i = 0, len = aKeys.length; i < len; i++) {
var key = aKeys[i];
if (!objB.hasOwnProperty(key) || objA[key] !== objB[key]) {
return false;
}
}
return true;
}
};
export var StringUtil = {
pluralize: function (str, count) {
var s = str;
if (count > 1) {
if (str.endsWith("y")) {
s = str.substring(0, str.length - 2) + 'ies';
} else {
s += 's';
}
}
return s;
}
}; | 3 | 0.1 | 3 | 0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.