text
stringlengths 16
4.96k
| positive
stringlengths 321
2.24k
| negative
stringlengths 310
2.21k
|
|---|---|---|
Add forbidden error if mismatch
|
'use strict';
const Account = require('../../models/account');
const createError = require('http-errors');
const bcrypt = require('bcrypt-as-promised');
const jwt = require('jsonwebtoken-as-promised');
function createToken (req, res, next) {
Account.findOne({ email: req.body.email })
.then(ensureAccount)
.then(compareHashes)
.then(generateToken)
.then(sendResponse)
.catch(onError)
;
function ensureAccount (account) {
if (!account) {
throw new createError.Forbidden();
}
return account;
}
function compareHashes (account) {
return bcrypt.compare(req.body.password, account.passwordHash);
}
function generateToken (account) {
const payload = {
id: account.id,
email: account.email,
profile: account.profile,
};
return jwt.sign(payload, req.app.locals.configuration.jwt.secret, req.app.locals.configuration.jwt.options);
}
function sendResponse (token) {
const jwt = {
access_token: token,
token_type: 'bearer'
};
res.status(201).json(jwt);
}
function onError (err) {
if (err && err.name === 'MismatchError') {
return next(new createError.Forbidden(err));
}
next(err);
}
}
module.exports = createToken;
|
'use strict';
const Account = require('../../models/account');
const createError = require('http-errors');
const bcrypt = require('bcrypt-as-promised');
const jwt = require('jsonwebtoken-as-promised');
function createToken (req, res, next) {
Account.findOne({ email: req.body.email })
.then(ensureAccount)
.then(compareHashes)
.then(generateToken)
.then(sendResponse)
.catch(next)
;
function ensureAccount (account) {
if (!account) {
throw new createError.Forbidden();
}
return account;
}
function compareHashes (account) {
return bcrypt.compare(req.body.password, account.passwordHash);
}
function generateToken (account) {
const payload = {
id: account.id,
email: account.email,
profile: account.profile,
};
return jwt.sign(payload, req.app.locals.configuration.jwt.secret, req.app.locals.configuration.jwt.options);
}
function sendResponse (token) {
const jwt = {
access_token: token,
token_type: 'bearer'
};
res.status(201).json(jwt);
}
}
module.exports = createToken;
|
Change page size to 25
|
from rest_framework.pagination import PageNumberPagination
from rest_framework.response import Response
class QuotesResultsSetPagination(PageNumberPagination):
page_size = 25
page_size_query_param = 'page_size'
max_page_size = 10000
def get_paginated_response(self, data):
return Response({
'pages': {
'next': self.page.next_page_number() if self.page.has_next() else None,
'previous': self.page.previous_page_number() if self.page.has_previous() else None
},
'count': self.page.paginator.count,
'results': data
})
|
from rest_framework.pagination import PageNumberPagination
from rest_framework.response import Response
class QuotesResultsSetPagination(PageNumberPagination):
page_size = 10
page_size_query_param = 'page_size'
max_page_size = 10000
def get_paginated_response(self, data):
return Response({
'pages': {
'next': self.page.next_page_number() if self.page.has_next() else None,
'previous': self.page.previous_page_number() if self.page.has_previous() else None
},
'count': self.page.paginator.count,
'results': data
})
|
Clean up some dead code
|
var R = require('ramda');
var util = require('util');
var jomini = require('jomini');
function flattenHistory(history) {
var isDate = R.pipe(R.split('.'), R.length, R.gt(2));
var split = R.partition(isDate, R.keys(history));
var initial = {
date: new Date(Date.UTC(1, 0, 1)),
events: R.omit(split[1], history)
};
// Some of the dates may have multiple events spread over multiple
// instances, for eg: '1800.1.1': [{owner: 'FRA'}, {religion: 'reformed'}].
// We want to combine these arrays into a single object
var acct = function(acc, val) {
var valArr = util.isArray(history[val]) ? history[val] : [history[val]];
return R.append({
date: jomini.toDate(val),
events: R.foldl(R.mixin, {}, valArr)
}, acc);
};
var arr = R.foldl(acct, [], split[1]);
return split[0].length > 0 ? R.prepend(initial, arr) : arr;
}
module.exports = flattenHistory;
|
var R = require('ramda');
var util = require('util');
var jomini = require('jomini');
function flattenHistory(history) {
var result = [];
var isDate = R.pipe(R.split('.'), R.length, R.gt(2));
var split = R.partition(isDate, R.keys(history));
var initial = {
date: new Date(Date.UTC(1, 0, 1)),
events: R.omit(split[1], history)
};
// Some of the dates may have multiple events spread over multiple
// instances, for eg: '1800.1.1': [{owner: 'FRA'}, {religion: 'reformed'}].
// We want to combine these arrays into a single object
var acct = function(acc, val) {
var valArr = util.isArray(history[val]) ? history[val] : [history[val]];
return R.append({
date: jomini.toDate(val),
events: R.foldl(R.mixin, {}, valArr)
}, acc);
}
var arr = R.foldl(acct, [], split[1])
return split[0].length > 0 ? R.prepend(initial, arr) : arr;
}
module.exports = flattenHistory;
|
Revert "Added: default sass compiler"
This reverts commit 6e89a663200b996c605ecd05dc1f0b60852c004e.
|
const gulp = require('gulp');
const plugins = require('gulp-load-plugins')();
const config = require('./gulp/config')();
const env = {
development: plugins.environments.development,
production: plugins.environments.production
};
// Default environment is production
plugins.environments.current(env.production);
function getTask(task) {
return require('./gulp/tasks/' + task)(gulp, plugins, config, env);
}
gulp.task('generate-favicon',
getTask('favicon')
);
require('./gulp/serve')(gulp, plugins, config, env);
gulp.task('serve',
gulp.series(
getTask('setDevelopment'),
gulp.parallel(getTask('eslint'), getTask('sasslint')),
gulp.parallel(getTask('copy'), getTask('javascript'), getTask('sass'), getTask('jsonMinify')),
getTask('html'),
gulp.parallel('watch', 'ws')
)
);
gulp.task('ci',
gulp.series(
gulp.parallel(getTask('eslint-fail'), getTask('sasslint')),
gulp.parallel(getTask('copy'), getTask('javascript'), getTask('sass'), getTask('jsonMinify')),
getTask('html'),
getTask('clean')
)
);
gulp.task('default',
gulp.series(
gulp.parallel(getTask('eslint'), getTask('sasslint')),
gulp.parallel(getTask('copy'), getTask('javascript'), getTask('sass'), getTask('jsonMinify')),
getTask('html'),
getTask('clean')
)
);
|
const gulp = require('gulp');
const plugins = require('gulp-load-plugins')();
const config = require('./gulp/config')();
const sass = require('gulp-sass')(require('sass'));
const env = {
development: plugins.environments.development,
production: plugins.environments.production
};
// Default environment is production
plugins.environments.current(env.production);
function getTask(task) {
return require('./gulp/tasks/' + task)(gulp, plugins, config, env);
}
gulp.task('generate-favicon',
getTask('favicon')
);
require('./gulp/serve')(gulp, plugins, config, env);
gulp.task('serve',
gulp.series(
getTask('setDevelopment'),
gulp.parallel(getTask('eslint'), getTask('sasslint')),
gulp.parallel(getTask('copy'), getTask('javascript'), getTask('sass'), getTask('jsonMinify')),
getTask('html'),
gulp.parallel('watch', 'ws')
)
);
gulp.task('ci',
gulp.series(
gulp.parallel(getTask('eslint-fail'), getTask('sasslint')),
gulp.parallel(getTask('copy'), getTask('javascript'), getTask('sass'), getTask('jsonMinify')),
getTask('html'),
getTask('clean')
)
);
gulp.task('default',
gulp.series(
gulp.parallel(getTask('eslint'), getTask('sasslint')),
gulp.parallel(getTask('copy'), getTask('javascript'), getTask('sass'), getTask('jsonMinify')),
getTask('html'),
getTask('clean')
)
);
|
Update @ Sat Mar 11 2017 17:01:32 GMT+0800 (CST)
|
const { exec } = require('child_process')
const ora = require('ora')
const config = require('../config')
const spinner = ora('Deploy to gh-pages...')
spinner.start()
function execute (cmd) {
return new Promise((resolve, reject) => {
exec(cmd, (err, stdout, stderr) => {
if (err) return reject(err)
if (stderr) return reject(stderr)
resolve(stdout)
})
})
}
execute(`cd ${config.paths.output}`)
.then(stdout => {
execute('pwd').then(s => console.log(s.toString()))
return execute('git add --all')
})
.then(stdout => {
return execute(`git commit -m 'Update @ ${new Date}'`)
})
.then(stdout => {
return execute('git push -u origin gh-pages')
})
.then(stdout => {
spinner.stop()
})
.catch(err => {
throw err
spinner.stop()
})
|
const { exec } = require('child_process')
const ora = require('ora')
const config = require('../config')
const spinner = ora('Deploy to gh-pages...')
spinner.start()
function execute (cmd) {
return new Promise((resolve, reject) => {
exec(cmd, (err, stdout, stderr) => {
if (err) return reject(err)
if (stderr) return reject(stderr)
resolve(stdout)
})
})
}
execute(`cd ${config.paths.output}`)
.then(stdout => {
return execute('git add --all')
})
.then(stdout => {
return execute(`git commit -m 'Update @ ${new Date}'`)
})
.then(stdout => {
return execute('git push -u origin gh-pages')
})
.then(stdout => {
spinner.stop()
})
.catch(err => {
throw err
spinner.stop()
})
|
Fix fixture encoding on Windows
|
"""Test data"""
from pathlib import Path
def patharg(path):
"""
Back slashes need to be escaped in ITEM args,
even in Windows paths.
"""
return str(path).replace('\\', '\\\\\\')
FIXTURES_ROOT = Path(__file__).parent
FILE_PATH = FIXTURES_ROOT / 'test.txt'
JSON_FILE_PATH = FIXTURES_ROOT / 'test.json'
BIN_FILE_PATH = FIXTURES_ROOT / 'test.bin'
FILE_PATH_ARG = patharg(FILE_PATH)
BIN_FILE_PATH_ARG = patharg(BIN_FILE_PATH)
JSON_FILE_PATH_ARG = patharg(JSON_FILE_PATH)
# Strip because we don't want new lines in the data so that we can
# easily count occurrences also when embedded in JSON (where the new
# line would be escaped).
FILE_CONTENT = FILE_PATH.read_text('utf8').strip()
JSON_FILE_CONTENT = JSON_FILE_PATH.read_text('utf8')
BIN_FILE_CONTENT = BIN_FILE_PATH.read_bytes()
UNICODE = FILE_CONTENT
|
"""Test data"""
from pathlib import Path
def patharg(path):
"""
Back slashes need to be escaped in ITEM args,
even in Windows paths.
"""
return str(path).replace('\\', '\\\\\\')
FIXTURES_ROOT = Path(__file__).parent
FILE_PATH = FIXTURES_ROOT / 'test.txt'
JSON_FILE_PATH = FIXTURES_ROOT / 'test.json'
BIN_FILE_PATH = FIXTURES_ROOT / 'test.bin'
FILE_PATH_ARG = patharg(FILE_PATH)
BIN_FILE_PATH_ARG = patharg(BIN_FILE_PATH)
JSON_FILE_PATH_ARG = patharg(JSON_FILE_PATH)
# Strip because we don't want new lines in the data so that we can
# easily count occurrences also when embedded in JSON (where the new
# line would be escaped).
FILE_CONTENT = FILE_PATH.read_text().strip()
JSON_FILE_CONTENT = JSON_FILE_PATH.read_text()
BIN_FILE_CONTENT = BIN_FILE_PATH.read_bytes()
UNICODE = FILE_CONTENT
|
Add css style to Dashboard sidebar item
|
package org.panifex.platform.module.dashboard.impl;
import java.util.ArrayList;
import java.util.Collection;
import org.apache.aries.blueprint.annotation.Bean;
import org.apache.aries.blueprint.annotation.Service;
import org.panifex.platform.module.api.sidebar.Sidebar;
import org.panifex.platform.module.api.sidebar.DefaultSidebarCommand;
import org.panifex.platform.module.api.sidebar.SidebarItem;
@Bean(id = DashboardSidebar.ID)
@Service(interfaces = Sidebar.class)
public class DashboardSidebar implements Sidebar {
public final static String ID = "org.panifex.platform.module.dashboard.impl.DashboardSidebar";
private Collection<SidebarItem> sidebarItems = new ArrayList<>();
/**
* Initializes Dashboard sidebar items;
*/
public DashboardSidebar() {
// create dashboard sidebar item
DefaultSidebarCommand dashboardItem = new DefaultSidebarCommand(
"Dashboard",
DashboardContent.ID,
0);
dashboardItem.setIconSclass("glyphicon glyphicon-home");
// add item to list
sidebarItems.add(dashboardItem);
}
@Override
public Collection<SidebarItem> getSidebarItems() {
return sidebarItems;
}
}
|
package org.panifex.platform.module.dashboard.impl;
import java.util.ArrayList;
import java.util.Collection;
import org.apache.aries.blueprint.annotation.Bean;
import org.apache.aries.blueprint.annotation.Service;
import org.panifex.platform.module.api.sidebar.Sidebar;
import org.panifex.platform.module.api.sidebar.DefaultSidebarCommand;
import org.panifex.platform.module.api.sidebar.SidebarItem;
@Bean(id = DashboardSidebar.ID)
@Service(interfaces = Sidebar.class)
public class DashboardSidebar implements Sidebar {
public final static String ID = "org.panifex.platform.module.dashboard.impl.DashboardSidebar";
private Collection<SidebarItem> sidebarItems = new ArrayList<>();
/**
* Initializes Dashboard sidebar items;
*/
public DashboardSidebar() {
// create dashboard sidebar item
DefaultSidebarCommand dashboardItem = new DefaultSidebarCommand(
"Dashboard",
DashboardContent.ID,
0);
// add item to list
sidebarItems.add(dashboardItem);
}
@Override
public Collection<SidebarItem> getSidebarItems() {
return sidebarItems;
}
}
|
Add channel numbers to 16B spectral setup
|
# Line SPW setup for 16B projects
linespw_dict = {0: ["HI", "1.420405752GHz", 4096],
3: ["OH1612", "1.612231GHz", 256],
5: ["OH1665", "1.6654018GHz", 256],
6: ["OH1667", "1.667359GHz", 256],
7: ["OH1720", "1.72053GHz", 256],
9: ["H152alp", "1.85425GHz", 128],
8: ["H153alp", "1.81825GHz", 128],
1: ["H166alp", "1.42473GHz", 128],
4: ["H158alp", "1.65154GHz", 128],
2: ["H164alp", "1.47734GHz", 128]}
|
# Line SPW setup for 16B projects
linespw_dict = {0: ["HI", "1.420405752GHz"],
3: ["OH1612", "1.612231GHz"],
5: ["OH1665", "1.6654018GHz"],
6: ["OH1667", "1.667359GHz"],
7: ["OH1720", "1.72053GHz"],
9: ["H152alp", "1.85425GHz"],
8: ["H153alp", "1.81825GHz"],
1: ["H166alp", "1.42473GHz"],
4: ["H158alp", "1.65154GHz"],
2: ["H164alp", "1.47734GHz"]}
|
Set clear flag on render pass to false
The clear flag needs to be flase to allow transparent render passes on
top of eachother. It was originally set to false in nin, but updating
three.js resets it to true, since that's the default in the official
three.js version.
|
/**
* @author alteredq / http://alteredqualia.com/
*/
THREE.RenderPass = function ( scene, camera, overrideMaterial, clearColor, clearAlpha ) {
THREE.Pass.call( this );
this.scene = scene;
this.camera = camera;
this.overrideMaterial = overrideMaterial;
this.clearColor = clearColor;
this.clearAlpha = ( clearAlpha !== undefined ) ? clearAlpha : 1;
this.oldClearColor = new THREE.Color();
this.oldClearAlpha = 1;
this.clear = false;
this.needsSwap = false;
};
THREE.RenderPass.prototype = Object.create( THREE.Pass.prototype );
THREE.RenderPass.prototype = {
constructor: THREE.RenderPass,
render: function ( renderer, writeBuffer, readBuffer, delta, maskActive ) {
this.scene.overrideMaterial = this.overrideMaterial;
if ( this.clearColor ) {
this.oldClearColor.copy( renderer.getClearColor() );
this.oldClearAlpha = renderer.getClearAlpha();
renderer.setClearColor( this.clearColor, this.clearAlpha );
}
renderer.render( this.scene, this.camera, readBuffer, this.clear );
if ( this.clearColor ) {
renderer.setClearColor( this.oldClearColor, this.oldClearAlpha );
}
this.scene.overrideMaterial = null;
}
};
|
/**
* @author alteredq / http://alteredqualia.com/
*/
THREE.RenderPass = function ( scene, camera, overrideMaterial, clearColor, clearAlpha ) {
THREE.Pass.call( this );
this.scene = scene;
this.camera = camera;
this.overrideMaterial = overrideMaterial;
this.clearColor = clearColor;
this.clearAlpha = ( clearAlpha !== undefined ) ? clearAlpha : 1;
this.oldClearColor = new THREE.Color();
this.oldClearAlpha = 1;
this.clear = true;
this.needsSwap = false;
};
THREE.RenderPass.prototype = Object.create( THREE.Pass.prototype );
THREE.RenderPass.prototype = {
constructor: THREE.RenderPass,
render: function ( renderer, writeBuffer, readBuffer, delta, maskActive ) {
this.scene.overrideMaterial = this.overrideMaterial;
if ( this.clearColor ) {
this.oldClearColor.copy( renderer.getClearColor() );
this.oldClearAlpha = renderer.getClearAlpha();
renderer.setClearColor( this.clearColor, this.clearAlpha );
}
renderer.render( this.scene, this.camera, readBuffer, this.clear );
if ( this.clearColor ) {
renderer.setClearColor( this.oldClearColor, this.oldClearAlpha );
}
this.scene.overrideMaterial = null;
}
};
|
Fix use of default config to match new refactor
|
from hypothesis_auto import auto_pytest_magic
from isort import parse
from isort.settings import Config
TEST_CONTENTS = """
import xyz
import abc
def function():
pass
"""
def test_file_contents():
(
in_lines,
out_lines,
import_index,
place_imports,
import_placements,
as_map,
imports,
categorized_comments,
first_comment_index_start,
first_comment_index_end,
change_count,
original_line_count,
line_separator,
sections,
section_comments,
) = parse.file_contents(TEST_CONTENTS, config=Config())
assert "\n".join(in_lines) == TEST_CONTENTS
assert "import" not in "\n".join(out_lines)
assert import_index == 1
assert change_count == -2
assert original_line_count == len(in_lines)
auto_pytest_magic(parse.import_type)
auto_pytest_magic(parse.skip_line)
auto_pytest_magic(parse._strip_syntax)
auto_pytest_magic(parse._infer_line_separator)
|
from hypothesis_auto import auto_pytest_magic
from isort import parse
from isort.settings import default
TEST_CONTENTS = """
import xyz
import abc
def function():
pass
"""
def test_file_contents():
(
in_lines,
out_lines,
import_index,
place_imports,
import_placements,
as_map,
imports,
categorized_comments,
first_comment_index_start,
first_comment_index_end,
change_count,
original_line_count,
line_separator,
sections,
section_comments,
) = parse.file_contents(TEST_CONTENTS, config=default)
assert "\n".join(in_lines) == TEST_CONTENTS
assert "import" not in "\n".join(out_lines)
assert import_index == 1
assert change_count == -2
assert original_line_count == len(in_lines)
auto_pytest_magic(parse.import_type)
auto_pytest_magic(parse.skip_line)
auto_pytest_magic(parse._strip_syntax)
auto_pytest_magic(parse._infer_line_separator)
|
Update test to check for required chrome values
|
var detect = require('rtc-core/detect');
var extend = require('cog/extend');
var test = require('tape');
var expect = require('./helpers/expect-constraints');
function mozMediaSource(type) {
return {
mozMediaSource: type,
mediaSource: type
};
}
test('share', expect({
audio: false,
video: extend(detect.moz ? mozMediaSource('window') : {}, {
mandatory: detect.moz ? {} : {
chromeMediaSource: 'screen'
},
optional: [
{ maxWidth: 1920 },
{ width: { max: 1920 } },
{ maxHeight: 1080 },
{ height: { max: 1080 } }
]
})
}));
test('share:window', expect({
audio: false,
video: extend(detect.moz ? mozMediaSource('window') : {}, {
mandatory: detect.moz ? {} : {
chromeMediaSource: 'screen'
},
optional: [
{ maxWidth: 1920 },
{ width: { max: 1920 } },
{ maxHeight: 1080 },
{ height: { max: 1080 } }
]
})
}));
|
var detect = require('rtc-core/detect');
var extend = require('cog/extend');
var test = require('tape');
var expect = require('./helpers/expect-constraints');
function mozMediaSource(type) {
return {
mozMediaSource: type,
mediaSource: type
};
}
test('share', expect({
audio: false,
video: extend(detect.moz ? mozMediaSource('window') : {}, {
mandatory: detect.moz ? {} : {
chromeMediaSource: 'window'
},
optional: [
{ maxWidth: 1920 },
{ width: { max: 1920 } },
{ maxHeight: 1080 },
{ height: { max: 1080 } }
]
})
}));
test('share:winwod', expect({
audio: false,
video: extend(detect.moz ? mozMediaSource('window') : {}, {
mandatory: detect.moz ? {} : {
chromeMediaSource: 'window'
},
optional: [
{ maxWidth: 1920 },
{ width: { max: 1920 } },
{ maxHeight: 1080 },
{ height: { max: 1080 } }
]
})
}));
|
Add new versions of Marionette for specific version test
|
/*
* (C) Copyright 2016 Boni Garcia (http://bonigarcia.github.io/)
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the GNU Lesser General Public License
* (LGPL) version 2.1 which accompanies this distribution, and is available at
* http://www.gnu.org/licenses/lgpl-2.1.html
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
*/
package io.github.bonigarcia.wdm.test;
import org.junit.Before;
import io.github.bonigarcia.wdm.MarionetteDriverManager;
import io.github.bonigarcia.wdm.base.BaseVersionTst;
/**
* Test asserting Marionette versions.
*
* @author Boni Garcia (boni.gg@gmail.com)
* @since 1.4.0
*/
public class MarionetteVersionTest extends BaseVersionTst {
@Before
public void setup() {
browserManager = MarionetteDriverManager.getInstance();
specificVersions = new String[] { "0.9.0", "0.8.0", "0.6.2", "0.5.0",
"0.4.0", "0.3.0" };
}
}
|
/*
* (C) Copyright 2016 Boni Garcia (http://bonigarcia.github.io/)
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the GNU Lesser General Public License
* (LGPL) version 2.1 which accompanies this distribution, and is available at
* http://www.gnu.org/licenses/lgpl-2.1.html
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
*/
package io.github.bonigarcia.wdm.test;
import org.junit.Before;
import io.github.bonigarcia.wdm.MarionetteDriverManager;
import io.github.bonigarcia.wdm.base.BaseVersionTst;
/**
* Test asserting Marionette versions.
*
* @author Boni Garcia (boni.gg@gmail.com)
* @since 1.4.0
*/
public class MarionetteVersionTest extends BaseVersionTst {
@Before
public void setup() {
browserManager = MarionetteDriverManager.getInstance();
specificVersions = new String[] { "0.6.2", "0.5.0", "0.4.0", "0.3.0" };
}
}
|
Add API key option for sensor downloader
|
// Download sensors from RPI Data Service
// Outputs each sensor in list to a file in output directory.
// If Api Key is specified, then it will be added to each object. Api Key is
// for access through the Data Service Proxy.
var fs = require('fs');
var request = require('request');
var qs = require('querystring');
var argv = require('yargs')
.usage('Usage: $0 <RPI HOST URL> <User UID> [API key] <-o directory>')
.demand(2)
.demand(['o'])
.argv;
var url = argv._[0] + "/v01/users/" + argv._[1] + "/sensors";
request({
url: url,
qs: {
detail: 'true'
}
}, function (error, response, body) {
outputStations(JSON.parse(body));
});
var outputStations = function (stations) {
stations.forEach(function(item) {
var filename = argv.o + "/" + encodeURIComponent(item.title) + ".json";
if (argv._[2]) {
item.api_key = argv._[2];
}
fs.writeFile(filename, JSON.stringify(item, null, " "), function (err) {
if (err) {
console.error("error!", err);
}
});
});
};
|
// Download sensors from RPI Data Service
// Outputs each sensor in list to a file in output directory.
var fs = require('fs');
var request = require('request');
var qs = require('querystring');
var argv = require('yargs')
.usage('Usage: $0 <RPI HOST URL> <User UID> <-o directory>')
.demand(2)
.demand(['o'])
.argv;
var url = argv._[0] + "/v01/users/" + argv._[1] + "/sensors";
request({
url: url,
qs: {
detail: 'true'
}
}, function (error, response, body) {
outputStations(JSON.parse(body));
});
var outputStations = function (stations) {
stations.forEach(function(item) {
var filename = argv.o + "/" + encodeURIComponent(item.title) + ".json";
fs.writeFile(filename, JSON.stringify(item, null, " "), function (err) {
if (err) {
console.error("error!", err);
}
});
});
};
|
Add assignJSON/toJSON to Number types
|
"use strict";
let errors = require('../errors.js');
let Type = require('./type.js');
let Value = require('./value.js');
class NumberValue extends Value {
constructor(type) {
super(type);
this.value = 0;
}
assign(newValue) {
if (typeof newValue == 'number') {
this.value = newValue;
} else if (newValue.value !== undefined && typeof newValue.value == 'number') {
this.value = newValue.value;
} else {
throw new errors.Internal(`Trying to assign ${newValue.type} to Number;`);
}
}
equals(other) {
return this.type == other.type && this.value == other.value;
}
innerToString() {
return `${this.value}`;
}
assignJSON(spec) {
this.value = spec;
}
toJSON() {
return this.value;
}
toString() {
return `${this.value}`;
}
}
class NumberType extends Type {
constructor() {
super(null, null, 'Number');
}
equals(other) {
return other === NumberType.singleton;
}
makeDefaultValue() {
return new NumberValue(this);
}
toString() {
return 'Number';
}
}
NumberType.singleton = new NumberType();
module.exports = {
Type: NumberType,
Value: NumberValue,
};
|
"use strict";
let errors = require('../errors.js');
let Type = require('./type.js');
let Value = require('./value.js');
class NumberValue extends Value {
constructor(type) {
super(type);
this.value = 0;
}
assign(newValue) {
if (typeof newValue == 'number') {
this.value = newValue;
} else if (newValue.value !== undefined && typeof newValue.value == 'number') {
this.value = newValue.value;
} else {
throw new errors.Internal(`Trying to assign ${newValue.type} to Number;`);
}
}
equals(other) {
return this.type == other.type && this.value == other.value;
}
innerToString() {
return `${this.value}`;
}
toString() {
return `${this.value}`;
}
}
class NumberType extends Type {
constructor() {
super(null, null, 'Number');
}
equals(other) {
return other === NumberType.singleton;
}
makeDefaultValue() {
return new NumberValue(this);
}
toString() {
return 'Number';
}
}
NumberType.singleton = new NumberType();
module.exports = {
Type: NumberType,
Value: NumberValue,
};
|
Fix codacy, use Interface instead of concrete type
|
package net.sf.jabref.model.metadata;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
public class ContentSelectors {
private final Map<String, List<String>> contentSelectors;
public ContentSelectors() {
contentSelectors = new HashMap<>();
}
public void addContentSelector(String fieldName, List<String> selectors) {
Objects.requireNonNull(fieldName);
Objects.requireNonNull(selectors);
this.contentSelectors.put(fieldName, selectors);
}
public List<String> getSelectorsForField(String fieldName) {
List<String> result = contentSelectors.get(fieldName);
if (result == null) {
result = Collections.emptyList();
}
return result;
}
public void removeSelector(String fieldName) {
contentSelectors.remove(fieldName);
}
public static ContentSelectors parse(List<String> selectors) {
//fixme: do the actual parsing
return null;
}
public List<String> getAsStringList() {
// fixme: do the actual serialization
return null;
}
public Map<String, List<String>> getSelectorData() {
return contentSelectors;
}
}
|
package net.sf.jabref.model.metadata;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Objects;
public class ContentSelectors {
private HashMap<String, List<String>> contentSelectors;
public ContentSelectors() {
contentSelectors = new HashMap<>();
}
public void addContentSelector(String fieldName, List<String> selectors) {
Objects.requireNonNull(fieldName);
Objects.requireNonNull(selectors);
this.contentSelectors.put(fieldName, selectors);
}
public List<String> getSelectorsForField(String fieldName) {
List<String> result = contentSelectors.get(fieldName);
if(result == null){
result = Collections.emptyList();
}
return result;
}
public void removeSelector(String fieldName) {
contentSelectors.remove(fieldName);
}
public static ContentSelectors parse(List<String> selectors) {
//fixme: do the actual parsing
return null;
}
public List<String> getAsStringList() {
// fixme: do the actual serialization
return null;
}
public HashMap<String, List<String>> getSelectorData() {
return contentSelectors;
}
}
|
Sample: Use create instead of getInstance
getInstance was renamed to create in 7ca4e0312ae898e369ba1e67bd9ec9245fb2a4d9
but wasn't changed here
|
package com.polidea.rxandroidble.sample;
import android.app.Application;
import android.content.Context;
import com.polidea.rxandroidble.RxBleClient;
import com.polidea.rxandroidble.internal.RxBleLog;
public class SampleApplication extends Application {
private RxBleClient rxBleClient;
/**
* In practise you will use some kind of dependency injection pattern.
*/
public static RxBleClient getRxBleClient(Context context) {
SampleApplication application = (SampleApplication) context.getApplicationContext();
return application.rxBleClient;
}
@Override
public void onCreate() {
super.onCreate();
rxBleClient = RxBleClient.create(this);
RxBleClient.setLogLevel(RxBleLog.DEBUG);
}
}
|
package com.polidea.rxandroidble.sample;
import android.app.Application;
import android.content.Context;
import com.polidea.rxandroidble.RxBleClient;
import com.polidea.rxandroidble.internal.RxBleLog;
public class SampleApplication extends Application {
private RxBleClient rxBleClient;
/**
* In practise you will use some kind of dependency injection pattern.
*/
public static RxBleClient getRxBleClient(Context context) {
SampleApplication application = (SampleApplication) context.getApplicationContext();
return application.rxBleClient;
}
@Override
public void onCreate() {
super.onCreate();
rxBleClient = RxBleClient.getInstance(this);
RxBleClient.setLogLevel(RxBleLog.DEBUG);
}
}
|
Make collisions with invaders destroy the player
|
package uk.co.alynn.games.ld30.world;
public abstract class Invader implements Adversary {
private final float m_x, m_y;
public Invader(float x, float y) {
m_x = x;
m_y = y;
}
@Override
public String getImage() {
return "invader";
}
@Override
public float getX() {
return m_x;
}
@Override
public float getY() {
return m_y;
}
abstract public Adversary update(float dt);
@Override
public Adversary hitPlayer(Runnable terminateGame) {
terminateGame.run();
return null;
}
@Override
public Adversary hitBullet() {
return null;
}
@Override
public float getHeading() {
return 0.0f;
}
}
|
package uk.co.alynn.games.ld30.world;
public abstract class Invader implements Adversary {
private final float m_x, m_y;
public Invader(float x, float y) {
m_x = x;
m_y = y;
}
@Override
public String getImage() {
return "invader";
}
@Override
public float getX() {
return m_x;
}
@Override
public float getY() {
return m_y;
}
abstract public Adversary update(float dt);
@Override
public Adversary hitPlayer(Runnable terminateGame) {
return this;
}
@Override
public Adversary hitBullet() {
return null;
}
@Override
public float getHeading() {
return 0.0f;
}
}
|
Fix the use of the moksha.templates.widget template, in one place. This needs to be fixed in many places
|
from moksha.lib.base import Controller
from moksha.lib.helpers import Category, MokshaApp, Not, not_anonymous, MokshaWidget
from moksha.api.widgets.containers import DashboardContainer
from moksha.api.widgets import ContextAwareWidget
from tg import expose, tmpl_context, require, request
from bugs import BugsController
from builds import BuildsController
from changelog import ChangelogController
from downloads import DownloadsController
from maintainers import MaintainersController
from owners import OwnersController
from updates import UpdatesController
from versions import VersionsController
from watchers import WatchersController
class OverviewDashboard(DashboardContainer, ContextAwareWidget):
template = 'mako:fedoracommunity.mokshaapps.packages.templates.single_col_dashboard'
layout = [Category('content-col-apps',[])]
overview_dashboard = OverviewDashboard
class OverviewController(Controller):
bugs = BugsController()
builds = BuildsController()
changelog = ChangelogController()
downloads = DownloadsController()
maintainers = MaintainersController()
owners = OwnersController()
updates = UpdatesController()
verisons = VersionsController()
watchers = WatchersController()
@expose('mako:moksha.templates.widget')
def index(self, package):
tmpl_context.widget = overview_dashboard
return dict(package=package, options={})
@expose('mako:moksha.templates.widget')
def overview(self, package):
tmpl_context.widget = overview_dashboard
return dict(package=package, options={})
|
from moksha.lib.base import Controller
from moksha.lib.helpers import Category, MokshaApp, Not, not_anonymous, MokshaWidget
from moksha.api.widgets.containers import DashboardContainer
from moksha.api.widgets import ContextAwareWidget
from tg import expose, tmpl_context, require, request
from bugs import BugsController
from builds import BuildsController
from changelog import ChangelogController
from downloads import DownloadsController
from maintainers import MaintainersController
from owners import OwnersController
from updates import UpdatesController
from versions import VersionsController
from watchers import WatchersController
class OverviewDashboard(DashboardContainer, ContextAwareWidget):
template = 'mako:fedoracommunity.mokshaapps.packages.templates.single_col_dashboard'
layout = [Category('content-col-apps',[])]
overview_dashboard = OverviewDashboard
class OverviewController(Controller):
bugs = BugsController()
builds = BuildsController()
changelog = ChangelogController()
downloads = DownloadsController()
maintainers = MaintainersController()
owners = OwnersController()
updates = UpdatesController()
verisons = VersionsController()
watchers = WatchersController()
@expose('mako:moksha.templates.widget')
def index(self, package):
tmpl_context.widget = overview_dashboard
return {'package': package}
@expose('mako:moksha.templates.widget')
def overview(self, package):
return self.index(package)
|
doc: Add simple docs to Event entities
|
from Entity import *
class EventEntity(Entity):
"""Event entities represent specific timepoints with associated data,
e.g. a trigger events. Data can be binary (8, 16 or 32 bit) values, text
or comma separated values (cvs).
"""
EVENT_TEXT = 1
EVENT_CSV = 2
EVENT_BYTE = 3
EVENT_WORD = 4
EVENT_DWORD = 5
def __init__(self, nsfile, eid, info):
super(EventEntity,self).__init__(eid, nsfile, info)
@property
def event_type(self):
"""The type of the event:
* binary (8, 16, 32 bit) [``EVENT_BYTE, EVENT_WORD, EVENT_DWORD``]
* text [``EVENT_TEXT``]
* comma separated values (csv) [``EVENT_CSV``]
"""
return self._info['EventType']
@property
def csv_desc(self):
return self._info['CSVDesc']
@property
def max_data_length(self):
return self._info['MaxDataLength']
def get_data (self, index):
"""Retrieve the data at ``index``"""
lib = self.file.library
data = lib._get_event_data (self, index)
return data
|
from Entity import *
class EventEntity(Entity):
EVENT_TEXT = 1
EVENT_CSV = 2
EVENT_BYTE = 3
EVENT_WORD = 4
EVENT_DWORD = 5
def __init__(self, nsfile, eid, info):
super(EventEntity,self).__init__(eid, nsfile, info)
@property
def event_type(self):
return self._info['EventType']
@property
def csv_desc(self):
return self._info['CSVDesc']
@property
def max_data_length(self):
return self._info['MaxDataLength']
def get_data (self, index):
lib = self.file.library
data = lib._get_event_data (self, index)
return data
|
Use reverse function in tests
|
from django.core.urlresolvers import reverse
import pytest
from saleor.userprofile.impersonate import can_impersonate
from saleor.userprofile.models import User
def test_staff_with_permission_can_impersonate(
staff_client, customer_user, staff_user, permission_impersonate_user,
staff_group):
staff_group.permissions.add(permission_impersonate_user)
staff_user.groups.add(staff_group)
staff_user = User.objects.get(pk=staff_user.pk)
response = staff_client.get(reverse('impersonate-start',
args=[customer_user.pk]), follow=True)
assert response.context['user'] == customer_user
assert response.context['user'].is_impersonate
assert response.context['request'].impersonator == staff_user
|
from django.core.urlresolvers import reverse
import pytest
from saleor.userprofile.impersonate import can_impersonate
from saleor.userprofile.models import User
def test_staff_with_permission_can_impersonate(
staff_client, customer_user, staff_user, permission_impersonate_user,
staff_group):
staff_group.permissions.add(permission_impersonate_user)
staff_user.groups.add(staff_group)
staff_user = User.objects.get(pk=staff_user.pk)
response = staff_client.get('/impersonate/{}'.format(customer_user.pk),
follow=True)
assert response.context['user'] == customer_user
assert response.context['user'].is_impersonate
assert response.context['request'].impersonator == staff_user
|
Use one es6 feature, modules.
|
/* global require, describe, it */
import assert from 'assert';
var urlPrefix = 'http://katas.tddbin.com/katas/es6/language/';
var katasUrl = urlPrefix + '__grouped__.json';
var GroupedKata = require('../src/grouped-kata.js');
describe('load ES6 kata data', function() {
it('loaded data are as expected', function(done) {
function onSuccess(groupedKatas) {
assert.ok(groupedKatas);
done();
}
new GroupedKata(katasUrl).load(function() {}, onSuccess);
});
describe('on error, call error callback and the error passed', function() {
it('invalid JSON', function(done) {
function onError(err) {
assert.ok(err);
done();
}
var invalidUrl = urlPrefix;
new GroupedKata(invalidUrl).load(onError);
});
it('for invalid data', function(done) {
function onError(err) {
assert.ok(err);
done();
}
var invalidData = urlPrefix + '__all__.json';
new GroupedKata(invalidData).load(onError);
});
});
});
|
/* global require, describe, it */
var assert = require('assert');
var urlPrefix = 'http://katas.tddbin.com/katas/es6/language/';
var katasUrl = urlPrefix + '__grouped__.json';
var GroupedKata = require('../src/grouped-kata.js');
describe('load ES6 kata data', function() {
it('loaded data are as expected', function(done) {
function onSuccess(groupedKatas) {
assert.ok(groupedKatas);
done();
}
new GroupedKata(katasUrl).load(function() {}, onSuccess);
});
describe('on error, call error callback and the error passed', function() {
it('invalid JSON', function(done) {
function onError(err) {
assert.ok(err);
done();
}
var invalidUrl = urlPrefix;
new GroupedKata(invalidUrl).load(onError);
});
it('for invalid data', function(done) {
function onError(err) {
assert.ok(err);
done();
}
var invalidData = urlPrefix + '__all__.json';
new GroupedKata(invalidData).load(onError);
});
});
});
|
Update to new more restrictive Slack OAuth2 scopes
All we need to read is users, so just ask for that.
https://api.slack.com/docs/oauth-scopes
|
import Ember from 'ember';
import Base from 'simple-auth/authenticators/base';
import config from 'ttz/config/environment';
export default Base.extend({
torii: null,
restore(data) {
return Ember.RSVP.resolve(data);
},
authenticate(state) {
if (config.environment !== 'production') {
return Ember.RSVP.resolve({
accessToken: 'token',
scope: 'identify,users:read'
});
}
return new Ember.RSVP.Promise((resolve, reject) => {
this.torii.open('slack-oauth2', state).then(data => {
return Ember.$.ajax({
type: 'POST',
url: '/api/tokens',
data,
dataType: 'json'
});
}).then(data => {
resolve(data);
}).catch(error => {
reject(error);
});
});
}
});
|
import Ember from 'ember';
import Base from 'simple-auth/authenticators/base';
import config from 'ttz/config/environment';
export default Base.extend({
torii: null,
restore(data) {
return Ember.RSVP.resolve(data);
},
authenticate(state) {
if (config.environment !== 'production') {
return Ember.RSVP.resolve({
accessToken: 'token',
scope: 'identify,read'
});
}
return new Ember.RSVP.Promise((resolve, reject) => {
this.torii.open('slack-oauth2', state).then(data => {
return Ember.$.ajax({
type: 'POST',
url: '/api/tokens',
data,
dataType: 'json'
});
}).then(data => {
resolve(data);
}).catch(error => {
reject(error);
});
});
}
});
|
Update the camb3lyp example to libxc 5 series
|
#!/usr/bin/env python
#
# Author: Qiming Sun <osirpt.sun@gmail.com>
#
'''Density functional calculations can be run with either the default
backend library, libxc, or an alternative library, xcfun. See also
example 32-xcfun_as_default.py for how to set xcfun as the default XC
functional library.
'''
from pyscf import gto, dft
from pyscf.hessian import uks as uks_hess
from pyscf import tdscf
mol = gto.M(atom="H; F 1 1.", basis='631g')
# Calculation using libxc
mf = dft.UKS(mol)
mf.xc = 'CAMB3LYP'
mf.kernel()
mf.nuc_grad_method().kernel()
# We can also evaluate the geometric hessian
hess = uks_hess.Hessian(mf).kernel()
print(hess.reshape(2,3,2,3))
# or TDDFT gradients
tdks = tdscf.TDA(mf)
tdks.nstates = 3
tdks.kernel()
tdks.nuc_grad_method().kernel()
# Switch to the xcfun library on the fly
mf._numint.libxc = dft.xcfun
# Repeat the geometric hessian
hess = uks_hess.Hessian(mf).kernel()
print(hess.reshape(2,3,2,3))
# and the TDDFT gradient calculation
tdks = tdscf.TDA(mf)
tdks.nstates = 3
tdks.kernel()
tdks.nuc_grad_method().kernel()
|
#!/usr/bin/env python
#
# Author: Qiming Sun <osirpt.sun@gmail.com>
#
'''
The default XC functional library (libxc) supports the energy and nuclear
gradients for range separated functionals. Nuclear Hessian and TDDFT gradients
need xcfun library. See also example 32-xcfun_as_default.py for how to set
xcfun library as the default XC functional library.
'''
from pyscf import gto, dft
mol = gto.M(atom="H; F 1 1.", basis='631g')
mf = dft.UKS(mol)
mf.xc = 'CAMB3LYP'
mf.kernel()
mf.nuc_grad_method().kernel()
from pyscf.hessian import uks as uks_hess
# Switching to xcfun library on the fly
mf._numint.libxc = dft.xcfun
hess = uks_hess.Hessian(mf).kernel()
print(hess.reshape(2,3,2,3))
from pyscf import tdscf
# Switching to xcfun library on the fly
mf._numint.libxc = dft.xcfun
tdks = tdscf.TDA(mf)
tdks.nstates = 3
tdks.kernel()
tdks.nuc_grad_method().kernel()
|
Fix ratio followers/following only displayed for "humans"
|
import os
from libraries.models import Tweet, User
from config import app_config as cfg
from libraries.graphs.graph import Graph
# Twitter API configuration
consumer_key = cfg.twitter["consumer_key"]
consumer_secret = cfg.twitter["consumer_secret"]
access_token = cfg.twitter["access_token"]
access_token_secret = cfg.twitter["access_token_secret"]
# Start
current_path = os.path.dirname(os.path.abspath(__file__))
# Average mentions per user
path ="{}/images/avg_mentions.png".format(current_path)
graph = Graph(path)
avg_mentions_per_user = Tweet.avg_mentions_per_user().values()
avg_mentions_per_bot = Tweet.avg_mentions_per_user(True).values()
graph.avg_tweets(avg_mentions_per_user, avg_mentions_per_bot, path)
path ="{}/images/vocabulary.png".format(current_path)
graph.vocabulary(Tweet.vocabulary_size().values(), Tweet.vocabulary_size(True).values(), path)
path ="{}/images/followers_following.png".format(current_path)
graph.ratio_followers_following(
User.ratio_followers_following_per_users(),
User.ratio_followers_following_per_users(is_bot=True),
path
)
|
import os
from libraries.models import Tweet, User
from config import app_config as cfg
from libraries.graphs.graph import Graph
# Twitter API configuration
consumer_key = cfg.twitter["consumer_key"]
consumer_secret = cfg.twitter["consumer_secret"]
access_token = cfg.twitter["access_token"]
access_token_secret = cfg.twitter["access_token_secret"]
# Start
current_path = os.path.dirname(os.path.abspath(__file__))
# Average mentions per user
path ="{}/images/avg_mentions.png".format(current_path)
graph = Graph(path)
avg_mentions_per_user = Tweet.avg_mentions_per_user().values()
avg_mentions_per_bot = Tweet.avg_mentions_per_user(True).values()
graph.avg_tweets(avg_mentions_per_user, avg_mentions_per_bot, path)
path ="{}/images/vocabulary.png".format(current_path)
graph.vocabulary(Tweet.vocabulary_size().values(), Tweet.vocabulary_size(True).values(), path)
path ="{}/images/followers_following.png".format(current_path)
graph.ratio_followers_following(
User.ratio_followers_following_per_users(),
User.ratio_followers_following_per_users(),
path
)
|
Fix invalid method used to get worlds
|
package openmods.network.targets;
import java.util.Collection;
import java.util.Set;
import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraft.world.WorldServer;
import net.minecraftforge.common.DimensionManager;
import openmods.network.DimCoord;
import openmods.network.IPacketTargetSelector;
import openmods.utils.NetUtils;
import com.google.common.base.Preconditions;
import cpw.mods.fml.common.network.handshake.NetworkDispatcher;
import cpw.mods.fml.relauncher.Side;
public class SelectChunkWatchers implements IPacketTargetSelector {
public static final IPacketTargetSelector INSTANCE = new SelectChunkWatchers();
@Override
public boolean isAllowedOnSide(Side side) {
return side == Side.SERVER;
}
@Override
public void listDispatchers(Object arg, Collection<NetworkDispatcher> result) {
Preconditions.checkArgument(arg instanceof DimCoord, "Argument must be DimCoord");
DimCoord coord = (DimCoord)arg;
WorldServer server = DimensionManager.getWorld(coord.dimension);
Set<EntityPlayerMP> players = NetUtils.getPlayersWatchingBlock(server, coord.x, coord.z);
for (EntityPlayerMP player : players) {
NetworkDispatcher dispatcher = NetUtils.getPlayerDispatcher(player);
result.add(dispatcher);
}
}
}
|
package openmods.network.targets;
import java.util.Collection;
import java.util.Set;
import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraft.server.MinecraftServer;
import net.minecraft.world.WorldServer;
import openmods.network.DimCoord;
import openmods.network.IPacketTargetSelector;
import openmods.utils.NetUtils;
import com.google.common.base.Preconditions;
import cpw.mods.fml.common.network.handshake.NetworkDispatcher;
import cpw.mods.fml.relauncher.Side;
public class SelectChunkWatchers implements IPacketTargetSelector {
public static final IPacketTargetSelector INSTANCE = new SelectChunkWatchers();
@Override
public boolean isAllowedOnSide(Side side) {
return side == Side.SERVER;
}
@Override
public void listDispatchers(Object arg, Collection<NetworkDispatcher> result) {
Preconditions.checkArgument(arg instanceof DimCoord, "Argument must be DimCoord");
DimCoord coord = (DimCoord)arg;
WorldServer server = MinecraftServer.getServer().worldServers[coord.dimension];
Set<EntityPlayerMP> players = NetUtils.getPlayersWatchingBlock(server, coord.x, coord.z);
for (EntityPlayerMP player : players) {
NetworkDispatcher dispatcher = NetUtils.getPlayerDispatcher(player);
result.add(dispatcher);
}
}
}
|
Change version to match RC (v0.27.0)
|
from setuptools import find_packages, setup
setup(
name="redshift-etl",
version="0.27.0",
author="Harry's Data Engineering and Contributors",
description="ETL code to ferry data from PostgreSQL databases (or S3 files) to Redshift cluster",
license="MIT",
keywords="redshift postgresql etl extract transform load",
url="https://github.com/harrystech/harrys-redshift-etl",
package_dir={"": "python"},
packages=find_packages("python"),
package_data={
"etl": [
"assets/*",
"config/*"
]
},
scripts=[
"python/scripts/submit_arthur.sh",
"python/scripts/re_run_partial_pipeline.py"
],
entry_points={
"console_scripts": [
# NB The script must end in ".py" so that spark submit accepts it as a Python script.
"arthur.py = etl.commands:run_arg_as_command",
"run_tests.py = etl.selftest:run_tests"
]
}
)
|
from setuptools import find_packages, setup
setup(
name="redshift-etl",
version="0.27b.1",
author="Harry's Data Engineering and Contributors",
description="ETL code to ferry data from PostgreSQL databases (or S3 files) to Redshift cluster",
license="MIT",
keywords="redshift postgresql etl extract transform load",
url="https://github.com/harrystech/harrys-redshift-etl",
package_dir={"": "python"},
packages=find_packages("python"),
package_data={
"etl": [
"assets/*",
"config/*"
]
},
scripts=[
"python/scripts/submit_arthur.sh",
"python/scripts/re_run_partial_pipeline.py"
],
entry_points={
"console_scripts": [
# NB The script must end in ".py" so that spark submit accepts it as a Python script.
"arthur.py = etl.commands:run_arg_as_command",
"run_tests.py = etl.selftest:run_tests"
]
}
)
|
Fix typo resulting in build issue
|
import * as MuConstants from '../exports/MuConstants'
const service = MuConstants.STORIES_SERVICE_URL
function handleErrors(response) {
if (!response.ok) {
throw Error(response.statusText);
}
return response;
}
// Get stories of a given type - HTTP GET request
export function getStories(type) {
var endpoint = service + type;
return fetch(endpoint, {
method: "GET"
}).then(handleErrors).then(res => res.json())
}
// Add story - HTTP POST request
export function addStory(story) {
var endpoint = service;
return fetch(endpoint, {
method: "POST",
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(story)
}).then(handleErrors).then(res => res.json())
}
// Update story - HTTP PUT request
export function replaceStory(story) {
var endpoint = service + story.key;
return fetch(endpoint, {
method: "PUT",
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(story)
}).then(handleErrors)
}
// Remove story using key - HTTP DELETE request
export function removeStory(key) {
var endpoint = service + key;
return fetch(endpoint, {
method: "DELETE"
}).then(handleErrors)
}
|
envimport * as MuConstants from '../exports/MuConstants'
const service = MuConstants.STORIES_SERVICE_URL
function handleErrors(response) {
if (!response.ok) {
throw Error(response.statusText);
}
return response;
}
// Get stories of a given type - HTTP GET request
export function getStories(type) {
var endpoint = service + type;
return fetch(endpoint, {
method: "GET"
}).then(handleErrors).then(res => res.json())
}
// Add story - HTTP POST request
export function addStory(story) {
var endpoint = service;
return fetch(endpoint, {
method: "POST",
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(story)
}).then(handleErrors).then(res => res.json())
}
// Update story - HTTP PUT request
export function replaceStory(story) {
var endpoint = service + story.key;
return fetch(endpoint, {
method: "PUT",
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(story)
}).then(handleErrors)
}
// Remove story using key - HTTP DELETE request
export function removeStory(key) {
var endpoint = service + key;
return fetch(endpoint, {
method: "DELETE"
}).then(handleErrors)
}
|
Update Url for Owncloud >= 5.X
|
function ownArticle(id) {
try {
var query = "?op=pluginhandler&plugin=owncloud&method=getOwnCloud&id=" + param_escape(id);
console.log(query);
var d = new Date();
var ts = d.getTime();
var w = window.open('backend.php?op=backend&method=loading', 'ttrss_tweet',
"status=0,toolbar=0,location=0,width=600,height=500,scrollbars=1,menubar=0");
new Ajax.Request("backend.php", {
parameters: query,
onComplete: function(transport) {
var ti = JSON.parse(transport.responseText);
var share_url = ti.ownurl + "/apps/bookmarks/addBm.php?output=popup&url=" + param_escape(ti.link)
+ '&title=' + ti.title;
w.location.href = share_url;
} });
} catch (e) {
exception_error("ownArticle", e);
}
}
|
function ownArticle(id) {
try {
var query = "?op=pluginhandler&plugin=owncloud&method=getOwnCloud&id=" + param_escape(id);
console.log(query);
var d = new Date();
var ts = d.getTime();
var w = window.open('backend.php?op=backend&method=loading', 'ttrss_tweet',
"status=0,toolbar=0,location=0,width=600,height=500,scrollbars=1,menubar=0");
new Ajax.Request("backend.php", {
parameters: query,
onComplete: function(transport) {
var ti = JSON.parse(transport.responseText);
var share_url = ti.ownurl + "?app=bookmarks&getfile=addBm.php?output=popup&url=" + param_escape(ti.link);
w.location.href = share_url;
} });
} catch (e) {
exception_error("ownArticle", e);
}
}
|
Fix capitalization on home page text
|
import React, { Component } from 'react';
import DocumentTitle from 'react-document-title';
import PurposeCategoryList from 'containers/PurposeCategoryList';
class HomePage extends Component {
render() {
return (
<DocumentTitle title="Etusivu - Varaamo">
<div>
<h2>Varaa vaivatta kaupungin tiloja ja laitteita</h2>
<p>
Varaustilanteen näet kirjautumatta. Varaaminen edellyttää kirjautumista. Kyseessä on
kokeiluasteella oleva palvelu, jonka kautta varataan kaupunginkirjaston,
nuorisoasiainkeskuksen ja varhaiskasvatusviraston tiloja ja työpisteitä.
</p>
<h2 id="purpose-category-header">Mitä haluat tehdä?</h2>
<PurposeCategoryList />
</div>
</DocumentTitle>
);
}
}
HomePage.propTypes = {};
export default HomePage;
|
import React, { Component } from 'react';
import DocumentTitle from 'react-document-title';
import PurposeCategoryList from 'containers/PurposeCategoryList';
class HomePage extends Component {
render() {
return (
<DocumentTitle title="Etusivu - Varaamo">
<div>
<h2>Varaa vaivatta kaupungin tiloja ja laitteita</h2>
<p>
Varaustilanteen näet kirjautumatta. Varaaminen edellyttää kirjautumista. Kyseessä on
kokeiluasteella oleva palvelu, jonka kautta varataan Kaupunginkirjaston,
Nuorisoasiainkeskuksen ja Varhaiskasvatusviraston tiloja ja työpisteitä.
</p>
<h2 id="purpose-category-header">Mitä haluat tehdä?</h2>
<PurposeCategoryList />
</div>
</DocumentTitle>
);
}
}
HomePage.propTypes = {};
export default HomePage;
|
: Create documentation of DataSource Settings
Task-Url:
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
print AdminControl.getCell()
cell = "/Cell:" + AdminControl.getCell() + "/"
cellid = AdminConfig.getid( cell )
dbs = AdminConfig.list( 'DataSource', str(cellid) )
dbs = dbs.splitlines()
print dbs
for db in dbs.splitlines():
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 )
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
print AdminControl.getCell()
cell = "/Cell:" + AdminControl.getCell() + "/"
cellid = AdminConfig.getid( cell )
dbs = AdminConfig.list( 'DataSource', str(cellid) )
for db in dbs.splitlines().split('('):
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 )
|
Drop references from popped classes.
|
package com.thoughtworks.xstream.core.util;
public final class ClassStack {
private Class[] stack;
private int pointer;
public ClassStack(int initialCapacity) {
stack = new Class[initialCapacity];
}
public void push(Class value) {
if (pointer + 1 >= stack.length) {
resizeStack(stack.length * 2);
}
stack[pointer++] = value;
}
public void popSilently() {
stack[--pointer] = null;
}
public Class pop() {
final Class result = stack[--pointer];
stack[pointer] = null;
return result;
}
public Class peek() {
return pointer == 0 ? null : stack[pointer - 1];
}
public int size() {
return pointer;
}
public Class get(int i) {
return stack[i];
}
private void resizeStack(int newCapacity) {
Class[] newStack = new Class[newCapacity];
System.arraycopy(stack, 0, newStack, 0, Math.min(stack.length, newCapacity));
stack = newStack;
}
}
|
package com.thoughtworks.xstream.core.util;
public final class ClassStack {
private Class[] stack;
private int pointer;
public ClassStack(int initialCapacity) {
stack = new Class[initialCapacity];
}
public void push(Class value) {
if (pointer + 1 >= stack.length) {
resizeStack(stack.length * 2);
}
stack[pointer++] = value;
}
public void popSilently() {
pointer--;
}
public Class pop() {
return stack[--pointer];
}
public Class peek() {
return pointer == 0 ? null : stack[pointer - 1];
}
public int size() {
return pointer;
}
public Class get(int i) {
return stack[i];
}
private void resizeStack(int newCapacity) {
Class[] newStack = new Class[newCapacity];
System.arraycopy(stack, 0, newStack, 0, Math.min(stack.length, newCapacity));
stack = newStack;
}
}
|
Remove some bug on mysql elements.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from system.DBMysql import connect
from system.ConfigLoader import getCfg
import logging
def loginUser(login, password):
''' Try to login a user regarding login/password '''
userContent = None
table = getCfg('MYSQL', 'table')
tableId = getCfg('MYSQL', 'idField')
tableLogin = getCfg('MYSQL', 'loginField')
tablePassword = getCfg('MYSQL', 'passwordField')
con = None
try:
# Starting
con = connect()
cur = con.cursor()
cur.execute(
'SELECT ' + tableId + ' FROM ' + table +
' WHERE ' + tableLogin + '=%s AND ' + tablePassword + '=%s',
(
login,
password
)
)
userContent = cur.fetchone()
if userContent is not None:
userContent = userContent[0]
except Exception as e:
logging.error('loginUser: Error from MySQL => %s' % e)
finally:
if con:
con.close()
return userContent
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from system.DBMysql import connect
from system.ConfigLoader import getCfg
def loginUser(login, password):
''' Try to login a user regarding login/password '''
userContent = None
table = getCfg('MYSQL', 'table')
tableId = getCfg('MYSQL', 'idField')
tableLogin = getCfg('MYSQL', 'loginField')
tablePassword = getCfg('MYSQL', 'passwordField')
try:
# Starting
con = connect()
cur = con.cursor()
cur.execute(
'SELECT ' + tableId + ' FROM ' + table +
' WHERE ' + tableLogin + '=%s AND ' + tablePassword + '=%s',
(
login,
password
)
)
userContent = cur.fetchone()
if userContent is not None:
userContent = userContent[0]
except db.Error as e:
logging.error('loginUser: Error from MySQL => %s' % e)
finally:
if con:
con.close()
return userContent
|
Reorder constants for clarity and add app title constant
|
<?php
define('ENVIRONMENT_LOCAL', 0);
define('ENVIRONMENT_CURRENT', ENVIRONMENT_LOCAL);
//Path constants
define('ROOT_PATH', dirname(__FILE__, 2).'/');
define('INC_PATH', ROOT_PATH.'inc/');
define('VIEWS_PATH', INC_PATH.'views/');
define('CONTROLLERS_PATH', INC_PATH.'controllers/');
//Url Constants
define('DOMAIN', getenv('MULTISITE_DOMAIN'));
define('BASE_URL','http://movies.'.DOMAIN.'/');
define('SUPER_SEARCH_URL', 'http://search.'.DOMAIN.'/index.php?q=');
define('API_URL', BASE_URL.'api/');
define('HOME_URL', BASE_URL.'');
define('RATED_URL', BASE_URL.'rated/');
define('SUGGESTIONS_URL', BASE_URL.'suggestions/');
define('STYLES_URL', BASE_URL.'styles/');
define('SCRIPTS_URL', BASE_URL.'scripts/');
//Application constants
define('APP_TITLE', 'Movie List');
|
<?php
define('ENVIRONMENT_LOCAL', 0);
define('ENVIRONMENT_CURRENT', ENVIRONMENT_LOCAL);
define('DOMAIN', getenv('MULTISITE_DOMAIN'));
define('BASE_URL','http://movies.'.DOMAIN.'/');
define('SUPER_SEARCH_URL', 'http://search.'.DOMAIN.'/index.php?q=');
define('ROOT_PATH', dirname(__FILE__, 2).'/');
define('API_URL', BASE_URL.'api/');
define('HOME_URL', BASE_URL.'');
define('RATED_URL', BASE_URL.'rated/');
define('SUGGESTIONS_URL', BASE_URL.'suggestions/');
define('STYLES_URL', BASE_URL.'styles/');
define('SCRIPTS_URL', BASE_URL.'scripts/');
define('INC_PATH', ROOT_PATH.'inc/');
define('VIEWS_PATH', INC_PATH.'views/');
define('CONTROLLERS_PATH', INC_PATH.'controllers/');
|
Remove the password reset controller
|
<?php
/*
|--------------------------------------------------------------------------
| Application Routes
|--------------------------------------------------------------------------
|
| Here is where you can register all of the routes for an application.
| It's a breeze. Simply tell Laravel the URIs it should respond to
| and give it the Closure to execute when that URI is requested.
|
*/
$_SERVER["HTTP_HOST"] = Config::get('app.host', '');
$_SERVER["SERVER_NAME"] = Config::get('app.host', '');
# stream links
Route::get("/main.mp3", function() {
return Redirect::to("//stream.r-a-d.io/main");
});
Route::get("/stream", function() {
return Redirect::to("//stream.r-a-d.io/main");
});
Route::get("/stream.mp3", function() {
return Redirect::to("//stream.r-a-d.io/main");
});
Route::get("/R-a-dio", function() {
return Redirect::to("//stream.r-a-d.io/main");
});
# API controller
Route::controller("/api", "API");
# Admin controller; adds extra auth + security
Route::controller('/admin', 'Admin');
# Index.
Route::controller('/', 'Home');
|
<?php
/*
|--------------------------------------------------------------------------
| Application Routes
|--------------------------------------------------------------------------
|
| Here is where you can register all of the routes for an application.
| It's a breeze. Simply tell Laravel the URIs it should respond to
| and give it the Closure to execute when that URI is requested.
|
*/
$_SERVER["HTTP_HOST"] = Config::get('app.host', '');
$_SERVER["SERVER_NAME"] = Config::get('app.host', '');
# stream links
Route::get("/main.mp3", function() {
return Redirect::to("//stream.r-a-d.io/main");
});
Route::get("/stream", function() {
return Redirect::to("//stream.r-a-d.io/main");
});
Route::get("/stream.mp3", function() {
return Redirect::to("//stream.r-a-d.io/main");
});
Route::get("/R-a-dio", function() {
return Redirect::to("//stream.r-a-d.io/main");
});
# API controller
Route::controller("/api", "API");
# Admin controller; adds extra auth + security
Route::controller('/admin', 'Admin');
# Artisan password reminder controller
Route::controller('password', 'RemindersController');
# Index.
Route::controller('/', 'Home');
|
Make it possible to disable notifications; disable them
|
package main
import (
"fmt"
"runtime"
"github.com/datawire/ambassador/pkg/supervisor"
)
var (
notifyRAI *RunAsInfo
notifyEnabled = false
)
// Notify displays a desktop banner notification to the user
func Notify(p *supervisor.Process, message string) {
p.Logf("----------------------------------------------------------------------")
p.Logf("NOTIFY: %s", message)
p.Logf("----------------------------------------------------------------------")
if !notifyEnabled {
return
}
if notifyRAI == nil {
var err error
notifyRAI, err = GuessRunAsInfo(p)
if err != nil {
p.Log(err)
notifyRAI = &RunAsInfo{}
}
}
var args []string
switch runtime.GOOS {
case "darwin":
script := fmt.Sprintf("display notification \"Edge Control Daemon\" with title \"%s\"", message)
args = []string{"osascript", "-e", script}
case "linux":
args = []string{"notify-send", "Edge Control Daemon", message}
default:
return
}
cmd := notifyRAI.Command(p, args...)
if err := cmd.Run(); err != nil {
p.Logf("ERROR while notifying: %v", err)
}
}
// MaybeNotify displays a notification only if a value changes
func MaybeNotify(p *supervisor.Process, name string, old, new bool) {
if old != new {
Notify(p, fmt.Sprintf("%s: %t -> %t", name, old, new))
}
}
|
package main
import (
"fmt"
"runtime"
"github.com/datawire/ambassador/pkg/supervisor"
)
var notifyRAI *RunAsInfo
// Notify displays a desktop banner notification to the user
func Notify(p *supervisor.Process, message string) {
if notifyRAI == nil {
var err error
notifyRAI, err = GuessRunAsInfo(p)
if err != nil {
p.Log(err)
notifyRAI = &RunAsInfo{}
}
}
var args []string
switch runtime.GOOS {
case "darwin":
script := fmt.Sprintf("display notification \"Edge Control Daemon\" with title \"%s\"", message)
args = []string{"osascript", "-e", script}
case "linux":
args = []string{"notify-send", "Edge Control Daemon", message}
default:
return
}
p.Logf("NOTIFY: %s", message)
cmd := notifyRAI.Command(p, args...)
if err := cmd.Run(); err != nil {
p.Logf("ERROR while notifying: %v", err)
}
}
// MaybeNotify displays a notification only if a value changes
func MaybeNotify(p *supervisor.Process, name string, old, new bool) {
if old != new {
Notify(p, fmt.Sprintf("%s: %t -> %t", name, old, new))
}
}
|
Patch version bump to .post2
|
#!/usr/bin/python
# -*- coding: UTF-8 -*-
from setuptools import setup, find_packages
import os
try:
import pypandoc
README = pypandoc.convert('README.md', 'rst')
except (IOError, ImportError):
README = ''
setup(name='niprov',
version='0.1.post2',
author='Jasper J.F. van den Bosch',
author_email='japsai@gmail.com',
description='provenance for neuroimaging data',
packages=find_packages(),
url = 'https://github.com/ilogue/niprov',
test_suite="tests",
scripts=['executables/provenance'],
zip_safe=False,
license='BSD',
long_description=README,
classifiers=[
'License :: OSI Approved :: BSD License',
'Intended Audience :: Science/Research',
'Topic :: Scientific/Engineering'],
package_data={'niprov': ['discovery-filter.txt']},
include_package_data=True,
)
|
#!/usr/bin/python
# -*- coding: UTF-8 -*-
from setuptools import setup, find_packages
import os
try:
import pypandoc
README = pypandoc.convert('README.md', 'rst')
except (IOError, ImportError):
README = ''
setup(name='niprov',
version='0.1.post1',
author='Jasper J.F. van den Bosch',
author_email='japsai@gmail.com',
description='provenance for neuroimaging data',
packages=find_packages(),
url = 'https://github.com/ilogue/niprov',
test_suite="tests",
scripts=['executables/provenance'],
zip_safe=False,
license='BSD',
long_description=README,
classifiers=[
'License :: OSI Approved :: BSD License',
'Intended Audience :: Science/Research',
'Topic :: Scientific/Engineering'],
package_data={'niprov': ['discovery-filter.txt']},
include_package_data=True,
)
|
Use nicer name for log level env variable
|
import deviceMiddleware from 'express-device';
import express from 'express';
import bodyParser from 'body-parser';
import winston from 'winston';
import routerRender from './middleware/routerRender';
import errorRender from './middleware/errorRender';
import staticMiddleware from './middleware/static';
const defaultConfig = {
logLevel: process.env.NODE_LOG_LEVEL || 'info',
port: process.env.NODE_PORT || 3000,
};
export default function server(userConfig = {}) {
const app = express();
const config = { ...defaultConfig, ...userConfig };
winston.level = config.logLevel;
app.use('/static', staticMiddleware);
if (process.env.NODE_ENV === 'production') {
app.use('/assets', express.static('dist/frontend'));
}
app.use(deviceMiddleware.capture());
app.use(bodyParser.json());
app.use(bodyParser.json({ type: 'application/json' }));
if (process.env.NODE_ENV !== 'production' && process.env.NODE_ENV !== 'test') {
// eslint-disable-next-line global-require
app.use(require('./middleware/dev').default);
}
app.use(routerRender);
app.use(errorRender);
app.ready = new Promise((resolve) => {
app.server = app.listen(config.port, () => {
winston.log('info', `Started server on port ${config.port}`);
resolve();
});
});
return app;
}
|
import deviceMiddleware from 'express-device';
import express from 'express';
import bodyParser from 'body-parser';
import winston from 'winston';
import routerRender from './middleware/routerRender';
import errorRender from './middleware/errorRender';
import staticMiddleware from './middleware/static';
const defaultConfig = {
logLevel: process.env.NODE_LOGLEVEL || 'info',
port: process.env.NODE_PORT || 3000,
};
export default function server(userConfig = {}) {
const app = express();
const config = { ...defaultConfig, ...userConfig };
winston.level = config.logLevel;
app.use('/static', staticMiddleware);
if (process.env.NODE_ENV === 'production') {
app.use('/assets', express.static('dist/frontend'));
}
app.use(deviceMiddleware.capture());
app.use(bodyParser.json());
app.use(bodyParser.json({ type: 'application/json' }));
if (process.env.NODE_ENV !== 'production' && process.env.NODE_ENV !== 'test') {
// eslint-disable-next-line global-require
app.use(require('./middleware/dev').default);
}
app.use(routerRender);
app.use(errorRender);
app.ready = new Promise((resolve) => {
app.server = app.listen(config.port, () => {
winston.log('info', `Started server on port ${config.port}`);
resolve();
});
});
return app;
}
|
Fix incorrect expected value assigned
|
/*
* Copyright 2016 requery.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.requery.sql;
import io.requery.PersistenceException;
/**
* Exception thrown when the affected row count of a executed statement doesn't match the value
* expected.
*/
public class RowCountException extends PersistenceException {
private final long expected;
private final long actual;
RowCountException(long expected, long actual) {
super("Expected " + expected + " row affected actual " + actual);
this.expected = expected;
this.actual = actual;
}
/**
* @return the expected affected value count
*/
public long getExpected() {
return expected;
}
/**
* @return the actual affected value count
*/
public long getActual() {
return actual;
}
}
|
/*
* Copyright 2016 requery.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.requery.sql;
import io.requery.PersistenceException;
/**
* Exception thrown when the affected row count of a executed statement doesn't match the value
* expected.
*/
public class RowCountException extends PersistenceException {
private final long expected;
private final long actual;
RowCountException(long expected, long actual) {
super("Expected " + expected + " row affected actual " + actual);
this.expected = expected;
this.actual = expected;
}
/**
* @return the expected affected value count
*/
public long getExpected() {
return expected;
}
/**
* @return the actual affected value count
*/
public long getActual() {
return actual;
}
}
|
Remove turn off Error Handler for Production environment
|
<?php defined('MONSTRA_ACCESS') or die('No direct script access.');
/**
* Monstra requires PHP 5.2.3 or greater
*/
if (version_compare(PHP_VERSION, "5.2.3", "<")) {
exit("Monstra requires PHP 5.2.3 or greater.");
}
/**
* Include Monstra Engine
*/
include ROOT . DS .'engine'. DS .'Monstra.php';
/**
* Set Monstra Environment
*
* Monstra has four predefined environments:
* Monstra::DEVELOPMENT - The development environment.
* Monstra::TESTING - The test environment.
* Monstra::STAGING - The staging environment.
* Monstra::PRODUCTION - The production environment.
*/
Monstra::$environment = Monstra::PRODUCTION;
/**
* Report Errors
*/
if (Monstra::$environment == Monstra::PRODUCTION) {
error_reporting(0);
} else {
error_reporting(-1);
}
/**
* Initialize Monstra
*/
Monstra::init();
|
<?php defined('MONSTRA_ACCESS') or die('No direct script access.');
/**
* Report All Errors
*
* By setting error reporting to -1, we essentially force PHP to report
* every error, and this is guranteed to show every error on future
* releases of PHP. This allows everything to be fixed early!
*/
error_reporting(-1);
/**
* Monstra requires PHP 5.2.3 or greater
*/
if (version_compare(PHP_VERSION, "5.2.3", "<")) {
exit("Monstra requires PHP 5.2.3 or greater.");
}
/**
* Include Monstra Engine
*/
include ROOT . DS .'engine'. DS .'Monstra.php';
/**
* Set Monstra Environment
*
* Monstra has four predefined environments:
* Monstra::DEVELOPMENT - The development environment.
* Monstra::TESTING - The test environment.
* Monstra::STAGING - The staging environment.
* Monstra::PRODUCTION - The production environment.
*/
Monstra::$environment = Monstra::PRODUCTION;
/**
* Initialize Monstra
*/
Monstra::init();
|
Rename Address Book to Task Manager
|
package seedu.address.storage;
import java.io.File;
import java.io.FileNotFoundException;
import javax.xml.bind.JAXBException;
import seedu.address.commons.exceptions.DataConversionException;
import seedu.address.commons.util.XmlUtil;
/**
* Stores taskmanager data in an XML file
*/
public class XmlFileStorage {
/**
* Saves the given taskmanager data to the specified file.
*/
public static void saveDataToFile(File file, XmlSerializableTaskManager taskManager)
throws FileNotFoundException {
try {
XmlUtil.saveDataToFile(file, taskManager);
} catch (JAXBException e) {
assert false : "Unexpected exception " + e.getMessage();
}
}
/**
* Returns task manager in the file or an empty task manager
*/
public static XmlSerializableTaskManager loadDataFromSaveFile(File file) throws DataConversionException,
FileNotFoundException {
try {
return XmlUtil.getDataFromFile(file, XmlSerializableTaskManager.class);
} catch (JAXBException e) {
throw new DataConversionException(e);
}
}
}
|
package seedu.address.storage;
import java.io.File;
import java.io.FileNotFoundException;
import javax.xml.bind.JAXBException;
import seedu.address.commons.exceptions.DataConversionException;
import seedu.address.commons.util.XmlUtil;
/**
* Stores taskmanager data in an XML file
*/
public class XmlFileStorage {
/**
* Saves the given taskmanager data to the specified file.
*/
public static void saveDataToFile(File file, XmlSerializableTaskManager addressBook)
throws FileNotFoundException {
try {
XmlUtil.saveDataToFile(file, addressBook);
} catch (JAXBException e) {
assert false : "Unexpected exception " + e.getMessage();
}
}
/**
* Returns task manager in the file or an empty task manager
*/
public static XmlSerializableTaskManager loadDataFromSaveFile(File file) throws DataConversionException,
FileNotFoundException {
try {
return XmlUtil.getDataFromFile(file, XmlSerializableTaskManager.class);
} catch (JAXBException e) {
throw new DataConversionException(e);
}
}
}
|
Apply users-permissions policy on the admin routes
|
'use strict';
/**
* Module dependencies
*/
// Public node modules.
const _ = require('lodash');
module.exports = strapi => {
return {
beforeInitialize: function() {
strapi.config.middleware.load.before.unshift('users-permissions');
},
initialize: function(cb) {
_.forEach(strapi.admin.config.routes, value => {
if (_.get(value.config, 'policies')) {
value.config.policies.unshift('plugins.users-permissions.permissions');
}
});
_.forEach(strapi.config.routes, value => {
if (_.get(value.config, 'policies')) {
value.config.policies.unshift('plugins.users-permissions.permissions');
}
});
if (strapi.plugins) {
_.forEach(strapi.plugins, (plugin, name) => {
_.forEach(plugin.config.routes, value => {
if (_.get(value.config, 'policies')) {
value.config.policies.unshift('plugins.users-permissions.permissions');
}
});
});
}
cb();
}
};
};
|
'use strict';
/**
* Module dependencies
*/
// Public node modules.
const _ = require('lodash');
module.exports = strapi => {
return {
beforeInitialize: function() {
strapi.config.middleware.load.before.unshift('users-permissions');
},
initialize: function(cb) {
_.forEach(strapi.config.routes, value => {
if (_.get(value.config, 'policies')) {
value.config.policies.unshift('plugins.users-permissions.permissions');
}
});
if (strapi.plugins) {
_.forEach(strapi.plugins, (plugin, name) => {
_.forEach(plugin.config.routes, value => {
if (_.get(value.config, 'policies')) {
value.config.policies.unshift('plugins.users-permissions.permissions');
}
});
});
}
cb();
}
};
};
|
Allow Pausing Atmo for Video Playback
Was missing in #332
|
pageflow.ConfigurationEditorView.register('video', {
configure: function() {
this.tab('general', function() {
this.group('general');
this.input('additional_title', pageflow.TextInputView);
this.input('additional_description', pageflow.TextAreaInputView, {size: 'short'});
});
this.tab('files', function() {
this.input('video_file_id', pageflow.FileInputView, {
collection: pageflow.videoFiles,
positioning: false
});
this.input('poster_image_id', pageflow.FileInputView, {
collection: pageflow.imageFiles,
positioning: false
});
this.input('thumbnail_image_id', pageflow.FileInputView, {
collection: pageflow.imageFiles,
positioning: false
});
this.input('mobile_poster_image_id', pageflow.FileInputView, {
collection: pageflow.imageFiles,
imagePositioning: false
});
});
this.tab('options', function() {
this.input('autoplay', pageflow.CheckBoxInputView);
if (pageflow.features.isEnabled('auto_change_page')) {
this.input('auto_change_page_on_ended', pageflow.CheckBoxInputView);
}
this.group('options', {canPauseAtmo: true});
});
}
});
|
pageflow.ConfigurationEditorView.register('video', {
configure: function() {
this.tab('general', function() {
this.group('general');
this.input('additional_title', pageflow.TextInputView);
this.input('additional_description', pageflow.TextAreaInputView, {size: 'short'});
});
this.tab('files', function() {
this.input('video_file_id', pageflow.FileInputView, {
collection: pageflow.videoFiles,
positioning: false
});
this.input('poster_image_id', pageflow.FileInputView, {
collection: pageflow.imageFiles,
positioning: false
});
this.input('thumbnail_image_id', pageflow.FileInputView, {
collection: pageflow.imageFiles,
positioning: false
});
this.input('mobile_poster_image_id', pageflow.FileInputView, {
collection: pageflow.imageFiles,
imagePositioning: false
});
});
this.tab('options', function() {
this.input('autoplay', pageflow.CheckBoxInputView);
if (pageflow.features.isEnabled('auto_change_page')) {
this.input('auto_change_page_on_ended', pageflow.CheckBoxInputView);
}
this.group('options');
});
}
});
|
Configure raw file loader for reading texts from files
|
/* eslint import/no-extraneous-dependencies: ["error", {"devDependencies": true}] */
import webpack from 'webpack';
export default {
context: __dirname,
entry: './index.jsx',
output: {
path: `${__dirname}/__build__`,
filename: 'bundle.js',
},
module: {
loaders: [
{ test: /\.jsx?$/, exclude: /node_modules/, loader: 'babel' },
{ test: /\.md$/, exclude: /node_modules/, loader: 'raw-loader' },
],
},
resolve: {
extensions: ['', '.js', '.jsx'],
},
plugins: (() => {
if (process.argv.indexOf('-p') !== -1) {
return [
new webpack.DefinePlugin({
'process.env': {
NODE_ENV: JSON.stringify('production'),
},
}),
new webpack.optimize.UglifyJsPlugin({
output: {
comments: false,
},
}),
];
}
return [];
})(),
};
|
/* eslint import/no-extraneous-dependencies: ["error", {"devDependencies": true}] */
import webpack from 'webpack';
export default {
context: __dirname,
entry: './index.jsx',
output: {
path: `${__dirname}/__build__`,
filename: 'bundle.js',
},
module: {
loaders: [
{ test: /\.jsx?$/, exclude: /node_modules/, loader: 'babel' },
],
},
resolve: {
extensions: ['', '.js', '.jsx'],
},
plugins: (() => {
if (process.argv.indexOf('-p') !== -1) {
return [
new webpack.DefinePlugin({
'process.env': {
NODE_ENV: JSON.stringify('production'),
},
}),
new webpack.optimize.UglifyJsPlugin({
output: {
comments: false,
},
}),
];
}
return [];
})(),
};
|
Add linoleum subprojects to babel defaults
|
/* eslint-disable object-shorthand */
module.exports = {
// We need to make sure that the transpiler hit the linoleum source since we are
// not building to npm or simliar.
ignore: function(filename) {
return ((/node_modules/.test(filename))
&& !(/linoleum(-[^/]*)?\/electron/.test(filename))
&& !(/linoleum(-[^/]*)?\/src/.test(filename))
&& !(/linoleum(-[^/]*)?\/tasks/.test(filename)))
|| (/\$.*\$/.test(filename));
},
sourceMap: 'inline',
auxiliaryCommentBefore: 'istanbul ignore start',
auxiliaryCommentAfter: 'istanbul ignore end',
presets: [
require.resolve('babel-preset-react'),
require.resolve('babel-preset-es2015'),
require.resolve('babel-preset-stage-0')
]
};
|
/* eslint-disable object-shorthand */
module.exports = {
// We need to make sure that the transpiler hit the linoleum source since we are
// not building to npm or simliar.
ignore: function(filename) {
return ((/node_modules/.test(filename))
&& !(/linoleum\/electron/.test(filename))
&& !(/linoleum\/src/.test(filename))
&& !(/linoleum\/tasks/.test(filename)))
|| (/\$.*\$/.test(filename));
},
sourceMap: 'inline',
auxiliaryCommentBefore: 'istanbul ignore start',
auxiliaryCommentAfter: 'istanbul ignore end',
presets: [
require.resolve('babel-preset-react'),
require.resolve('babel-preset-es2015'),
require.resolve('babel-preset-stage-0')
]
};
|
Add anon param to script tag
|
if (document.location.hostname == "localhost") {
var baseurl = "";
} else {
var baseurl = "https://noconfidencevote.openup.org.za";
}
var agent = navigator.userAgent.toLowerCase();
if (agent.includes("mobile") && agent.includes("android")) {
// addEventListener only available in later chrome versions
window.addEventListener("load",function(){
window.addEventListener('error', function(e) {
ga('send', 'event', 'JavaScript Error Parent', e.filename + ': ' + e.lineno, e.message);
});
document.getElementById("contactmps-embed-parent").children[0].setAttribute("height", "3000px");
});
document.write('<div id="contactmps-embed-parent" style="height: 3000px"></div>');
} else {
document.write('<div id="contactmps-embed-parent"></div>');
}
document.write('<script type="text/javascript" src="' + baseurl + '/static/javascript/pym.v1.min.js" crossorigin="anonymous"></script>');
document.write("<script>var pymParent = new pym.Parent('contactmps-embed-parent', '" + baseurl + "/campaign/newsmedia/', {});</script>");
|
if (document.location.hostname == "localhost") {
var baseurl = "";
} else {
var baseurl = "https://noconfidencevote.openup.org.za";
}
var agent = navigator.userAgent.toLowerCase();
if (agent.includes("mobile") && agent.includes("android")) {
// addEventListener only available in later chrome versions
window.addEventListener("load",function(){
window.addEventListener('error', function(e) {
ga('send', 'event', 'JavaScript Error Parent', e.filename + ': ' + e.lineno, e.message);
});
document.getElementById("contactmps-embed-parent").children[0].setAttribute("height", "3000px");
});
document.write('<div id="contactmps-embed-parent" style="height: 3000px"></div>');
} else {
document.write('<div id="contactmps-embed-parent"></div>');
}
document.write('<script type="text/javascript" src="' + baseurl + '/static/javascript/pym.v1.min.js"></script>');
document.write("<script>var pymParent = new pym.Parent('contactmps-embed-parent', '" + baseurl + "/campaign/newsmedia/', {});</script>");
|
Fix inconsistency in StringSource MarshalJSON
The method should be bound to *StringSource like every other type in
the codebase. The unintentional copy did not result in any
behavioral changes.
Signed-off-by: Monis Khan <19f7b2d00144930eab99c4cfd2a8d2d2a225ef67@redhat.com>
|
package v1
import "encoding/json"
// UnmarshalJSON implements the json.Unmarshaller interface.
// If the value is a string, it sets the Value field of the StringSource.
// Otherwise, it is unmarshaled into the StringSourceSpec struct
func (s *StringSource) UnmarshalJSON(value []byte) error {
// If we can unmarshal to a simple string, just set the value
var simpleValue string
if err := json.Unmarshal(value, &simpleValue); err == nil {
s.Value = simpleValue
return nil
}
// Otherwise do the full struct unmarshal
return json.Unmarshal(value, &s.StringSourceSpec)
}
// MarshalJSON implements the json.Marshaller interface.
// If the StringSource contains only a string Value (or is empty), it is marshaled as a JSON string.
// Otherwise, the StringSourceSpec struct is marshaled as a JSON object.
func (s *StringSource) MarshalJSON() ([]byte, error) {
// If we have only a cleartext value set, do a simple string marshal
if s.StringSourceSpec == (StringSourceSpec{Value: s.Value}) {
return json.Marshal(s.Value)
}
// Otherwise do the full struct marshal of the externalized bits
return json.Marshal(s.StringSourceSpec)
}
|
package v1
import "encoding/json"
// UnmarshalJSON implements the json.Unmarshaller interface.
// If the value is a string, it sets the Value field of the StringSource.
// Otherwise, it is unmarshaled into the StringSourceSpec struct
func (s *StringSource) UnmarshalJSON(value []byte) error {
// If we can unmarshal to a simple string, just set the value
var simpleValue string
if err := json.Unmarshal(value, &simpleValue); err == nil {
s.Value = simpleValue
return nil
}
// Otherwise do the full struct unmarshal
return json.Unmarshal(value, &s.StringSourceSpec)
}
// MarshalJSON implements the json.Marshaller interface.
// If the StringSource contains only a string Value (or is empty), it is marshaled as a JSON string.
// Otherwise, the StringSourceSpec struct is marshaled as a JSON object.
func (s StringSource) MarshalJSON() ([]byte, error) {
// If we have only a cleartext value set, do a simple string marshal
if s.StringSourceSpec == (StringSourceSpec{Value: s.Value}) {
return json.Marshal(s.Value)
}
// Otherwise do the full struct marshal of the externalized bits
return json.Marshal(s.StringSourceSpec)
}
|
Fix api server crashing when response isn't an array
|
import request from 'request';
import { parseString } from 'xml2js';
export default function loadFromBGG(req) {
return new Promise((resolve, reject) => {
request({
url: `http://www.boardgamegeek.com/xmlapi2/search?query=${req.query.q}&type=boardgame`,
timeout: 10000
}, (error, response, body) => {
if (error) reject(error);
parseString(body, (err, res) => {
if (err) reject(err);
if (Array.isArray(res.items)) {
resolve(res.items.item.map((el) => {
return {
bggid: el.$.id,
name: el.name[0].$.value,
year: el.yearpublished ? Number(el.yearpublished[0].$.value) : 0
};
}));
} else {
resolve([]);
}
});
});
});
}
|
import request from 'request';
import { parseString } from 'xml2js';
export default function loadFromBGG(req) {
return new Promise((resolve, reject) => {
request({
url: `http://www.boardgamegeek.com/xmlapi2/search?query=${req.query.q}&type=boardgame`,
timeout: 5000
}, (error, response, body) => {
if (error) reject(error);
parseString(body, (err, res) => {
if (err) reject(err);
if (Array.isArray(res.items.item)) {
resolve(res.items.item.map((el) => {
return {
bggid: el.$.id,
name: el.name[0].$.value,
year: el.yearpublished ? Number(el.yearpublished[0].$.value) : 0
};
}));
} else {
resolve([]);
}
});
});
});
}
|
Fix tests on node v4
|
const assert = require('assert');
const http = require('http');
const concat = require('concat-stream');
function createServer() {
return http.createServer((req, res) => {
if (req.method === 'GET' && req.url === '/') {
res.statusCode = 200;
res.setHeader('FoO', 'bar');
res.end('body');
return;
}
if (req.method === 'GET' && req.url === '/?foo=baz') {
res.statusCode = 200;
res.setHeader('FoO', 'baz');
res.end('body');
return;
}
if (req.method === 'POST' && req.url === '/') {
assert(req.headers['content-type'] === 'application/json');
req.pipe(concat(body => {
assert(JSON.parse(body.toString()).foo === 'baz');
res.statusCode = 200;
res.end('json body');
}));
return;
}
}).listen(3000);
}
module.exports = createServer;
|
const assert = require('assert');
const http = require('http');
const concat = require('concat-stream');
function createServer() {
return http.createServer((req, res) => {
const {method, url, headers} = req;
if (method === 'GET' && url === '/') {
res.statusCode = 200;
res.setHeader('FoO', 'bar');
res.end('body');
return;
}
if (method === 'GET' && url === '/?foo=baz') {
res.statusCode = 200;
res.setHeader('FoO', 'baz');
res.end('body');
return;
}
if (method === 'POST' && url === '/') {
assert(headers['content-type'] === 'application/json');
req.pipe(concat(body => {
assert(JSON.parse(body.toString()).foo === 'baz');
res.statusCode = 200;
res.end('json body');
}));
return;
}
}).listen(3000);
}
module.exports = createServer;
|
Fix `currentStateIndex` while time travelling
|
import commitExcessActions from './commitExcessActions';
export function recompute(previousLiftedState, storeState, action, nextActionId = 1, isExcess) {
const actionId = nextActionId - 1;
const liftedState = { ...previousLiftedState };
if (liftedState.currentStateIndex === liftedState.stagedActionIds.length - 1) {
liftedState.currentStateIndex++;
}
liftedState.stagedActionIds = [...liftedState.stagedActionIds, actionId];
liftedState.actionsById = { ...liftedState.actionsById };
if (action.type === 'PERFORM_ACTION') {
liftedState.actionsById[actionId] = action;
} else {
liftedState.actionsById[actionId] = {
action: action.action || action,
timestamp: action.timestamp || Date.now(),
type: 'PERFORM_ACTION'
};
}
liftedState.nextActionId = nextActionId;
liftedState.computedStates = [...liftedState.computedStates, { state: storeState }];
if (isExcess) commitExcessActions(liftedState);
return liftedState;
}
|
import commitExcessActions from './commitExcessActions';
export function recompute(previousLiftedState, storeState, action, nextActionId = 1, isExcess) {
const actionId = nextActionId - 1;
const liftedState = { ...previousLiftedState };
liftedState.stagedActionIds = [...liftedState.stagedActionIds, actionId];
liftedState.actionsById = { ...liftedState.actionsById };
if (action.type === 'PERFORM_ACTION') {
liftedState.actionsById[actionId] = action;
} else {
liftedState.actionsById[actionId] = {
action: action.action || action,
timestamp: action.timestamp || Date.now(),
type: 'PERFORM_ACTION'
};
}
liftedState.nextActionId = nextActionId;
liftedState.computedStates = [...liftedState.computedStates, { state: storeState }];
liftedState.currentStateIndex++;
if (isExcess) commitExcessActions(liftedState);
return liftedState;
}
|
Add ci parameter to get_ci() and push_ci() methods.
|
# coding=utf-8
import types
import config
class Driver(object):
def __init__(self):
self.driver_type = self.__class__.__name__
# Get credentials from conf files for CMDB
pass
def get_driver_type(self):
return self.driver_type
def get_ci(self,ci):
pass
def push_ci(self,ci):
pass
class Itop(Driver):
def get_ci(self,ci):
print "Get from itop"
return True
def push_ci(self):
pass
class Redfish(Driver):
def get_ci(self,ci):
print "Get from redfish"
return True
class Ironic(Driver):
pass
class Mondorescue(Driver):
pass
class Fakecmdb(Driver):
pass
class Fakeprovider(Driver):
def get_ci(self,ci):
import app
# Simulate a driver that will provide Manager data.
# TODO a connect method must be implemented as
# Assuming the connection is ok.
# Now create a manager model from reference model.
ci.ci_type = "Manager"
ci.data = config.alexandria.model.Manager
class DriverCollection(list):
pass
|
# coding=utf-8
import types
import config
class Driver(object):
def __init__(self):
self.driver_type = self.__class__.__name__
# Get credentials from conf files for CMDB
pass
def get_driver_type(self):
return self.driver_type
def get_ci(self):
pass
def push_ci(self):
pass
class Itop(Driver):
def get_ci(self):
print "Get from itop"
return True
def push_ci(self):
pass
class Redfish(Driver):
def get_ci(self):
print "Get from redfish"
return True
pass
class Ironic(Driver):
pass
class Mondorescue(Driver):
pass
class Fakecmdb(Driver):
pass
class Fakeprovider(Driver):
pass
class DriverCollection(list):
pass
|
Align with new 'min' value
|
<?php
namespace Raffle;
class RandomService
{
/**
* Base URL
*/
const BASE_URL = 'https://www.random.org/integer-sets/?sets=1&min=%d&max=%d&num=%d&order=random&format=plain&rnd=new';
/**
* Retrieve a block of random numbers.
*
* @param int $min Minimum amount.
* @param int $max Maximum amount.
* @return array
*/
public function getRandomNumbers($min, $max)
{
// Construct the URL
$url = sprintf(self::BASE_URL, $min, $max, $max);
// Fetch the numbers
$ch = curl_init();
curl_setopt($ch, CURLOPT_URL, $url);
curl_setopt($ch, CURLOPT_RETURNTRANSFER, 1);
curl_setopt($ch, CURLOPT_FOLLOWLOCATION, true);
$data = curl_exec($ch);
// Decode data and return
return explode(" ", trim($data));
}
}
|
<?php
namespace Raffle;
class RandomService
{
/**
* Base URL
*/
const BASE_URL = 'https://www.random.org/integer-sets/?sets=1&min=%d&max=%d&num=%d&order=random&format=plain&rnd=new';
/**
* Retrieve a block of random numbers.
*
* @param int $min Minimum amount.
* @param int $max Maximum amount.
* @return array
*/
public function getRandomNumbers($min, $max)
{
// Construct the URL
$url = sprintf(self::BASE_URL, $min, $max, $max + 1);
// Fetch the numbers
$ch = curl_init();
curl_setopt($ch, CURLOPT_URL, $url);
curl_setopt($ch, CURLOPT_RETURNTRANSFER, 1);
curl_setopt($ch, CURLOPT_FOLLOWLOCATION, true);
$data = curl_exec($ch);
// Decode data and return
return explode(" ", trim($data));
}
}
|
Rename version field in Choice to question
|
from django.db import models
from rest_framework import serializers
class Question(models.Model):
version = models.CharField(primary_key=True, max_length=8)
text = models.TextField()
created_on = models.DateTimeField(auto_now_add=True)
updated_on = models.DateTimeField(auto_now=True)
class Choice(models.Model):
text = models.TextField()
question = models.ForeignKey(Question, on_delete=models.CASCADE)
created_on = models.DateTimeField(auto_now_add=True)
updated_on = models.DateTimeField(auto_now=True)
class Answer(models.Model):
choice = models.ForeignKey(Choice, on_delete=models.CASCADE)
user_id = models.TextField()
created_on = models.DateTimeField(auto_now_add=True)
class ChoiceSerializer(serializers.ModelSerializer):
class Meta:
model = Choice
fields = ('id', 'text', 'version', 'created_on', 'updated_on',)
class QuestionSerializer(serializers.ModelSerializer):
# TODO: create a serializer that returns list of choices for the question
class Meta:
model = Question
fields = ('text', 'version', 'created_on', 'updated_on',)
class AnswerSerializer(serializers.ModelSerializer):
class Meta:
model = Answer
fields = ('id', 'choice_id', 'user_id', 'created_on',)
|
from django.db import models
from rest_framework import serializers
class Question(models.Model):
version = models.CharField(primary_key=True, max_length=8)
text = models.TextField()
created_on = models.DateTimeField(auto_now_add=True)
updated_on = models.DateTimeField(auto_now=True)
class Choice(models.Model):
text = models.TextField()
version = models.ForeignKey(Question, on_delete=models.CASCADE)
created_on = models.DateTimeField(auto_now_add=True)
updated_on = models.DateTimeField(auto_now=True)
class Answer(models.Model):
choice = models.ForeignKey(Choice, on_delete=models.CASCADE)
user_id = models.TextField()
created_on = models.DateTimeField(auto_now_add=True)
class ChoiceSerializer(serializers.ModelSerializer):
class Meta:
model = Choice
fields = ('id', 'text', 'version', 'created_on', 'updated_on',)
class QuestionSerializer(serializers.ModelSerializer):
# TODO: create a serializer that returns list of choices for the question
class Meta:
model = Question
fields = ('text', 'version', 'created_on', 'updated_on',)
class AnswerSerializer(serializers.ModelSerializer):
class Meta:
model = Answer
fields = ('id', 'choice_id', 'user_id', 'created_on',)
|
Upgrade code for uses new api redux-logger
|
import { compose, createStore, applyMiddleware } from 'redux';
import { devTools } from 'redux-devtools';
import rootReducer from '../reducers';
import thunkMiddleware from 'redux-thunk';
import createLogger from 'redux-logger';
let createStoreWithMiddleware;
if (__DEV__) {
let middlewares;
if (__CLIENT__) {
const loggerMiddleware = createLogger();
middlewares = applyMiddleware(
thunkMiddleware,
loggerMiddleware
);
} else {
middlewares = applyMiddleware(
thunkMiddleware
);
}
if (__DEBUG__) {
createStoreWithMiddleware = compose(
middlewares,
devTools()
)(createStore);
} else {
createStoreWithMiddleware = middlewares(createStore);
}
}
if (__PROD__) {
createStoreWithMiddleware = applyMiddleware(
thunkMiddleware
)(createStore);
}
export default function configureStore (initialState) {
const store = createStoreWithMiddleware(rootReducer, initialState);
if (__DEV__ && module.hot) {
module.hot.accept('../reducers', () => {
const nextRootReducer = require('../reducers/index');
store.replaceReducer(nextRootReducer);
});
}
return store;
}
|
import { compose, createStore, applyMiddleware } from 'redux';
import { devTools } from 'redux-devtools';
import rootReducer from '../reducers';
import thunkMiddleware from 'redux-thunk';
import loggerMiddleware from 'redux-logger';
let createStoreWithMiddleware;
if (__DEV__) {
if (__DEBUG__) {
createStoreWithMiddleware = compose(
applyMiddleware(
thunkMiddleware,
loggerMiddleware
),
devTools()
)(createStore);
} else {
createStoreWithMiddleware = applyMiddleware(
thunkMiddleware,
loggerMiddleware
)(createStore);
}
}
if (__PROD__) {
createStoreWithMiddleware = applyMiddleware(
thunkMiddleware
)(createStore);
}
export default function configureStore (initialState) {
const store = createStoreWithMiddleware(rootReducer, initialState);
if (__DEV__ && module.hot) {
module.hot.accept('../reducers', () => {
const nextRootReducer = require('../reducers/index');
store.replaceReducer(nextRootReducer);
});
}
return store;
}
|
[TASK] Switch lastUpdated pages rendering to PageType usage
|
<?php
namespace DreadLabs\VantomasWebsite\Page;
use DreadLabs\VantomasWebsite\Archive\SearchInterface;
use DreadLabs\VantomasWebsite\RssFeed\ConfigurationInterface as RssFeedConfigurationInterface;
use DreadLabs\VantomasWebsite\Sitemap\ConfigurationInterface as SitemapConfiguration;
use DreadLabs\VantomasWebsite\Taxonomy\TagSearchInterface;
interface PageRepositoryInterface {
/**
* Searches for archived (page) nodes by given criteria
*
* @param SearchInterface $search
* @return Page[]
*/
public function findArchived(SearchInterface $search);
/**
* Finds last updated pages of type $pageType
*
* @param PageType $pageType
* @param int $offset
* @param int $limit
* @return Page[]
*/
public function findLastUpdated(PageType $pageType, $offset = 0, $limit = 1);
/**
* Finds all pages with tags
*
* @return Page[]
*/
public function findAllWithTags();
/**
* @param TagSearchInterface $tagSearch
* @return Page[]
*/
public function findAllByTag(TagSearchInterface $tagSearch);
/**
* @param SitemapConfiguration $configuration
* @return Page[]
*/
public function findForSitemapXml(SitemapConfiguration $configuration);
/**
* @param RssFeedConfigurationInterface $configuration
* @return Page[]
*/
public function findAllForRssFeed(RssFeedConfigurationInterface $configuration);
}
|
<?php
namespace DreadLabs\VantomasWebsite\Page;
use DreadLabs\VantomasWebsite\Archive\SearchInterface;
use DreadLabs\VantomasWebsite\RssFeed\ConfigurationInterface as RssFeedConfigurationInterface;
use DreadLabs\VantomasWebsite\Sitemap\ConfigurationInterface as SitemapConfiguration;
use DreadLabs\VantomasWebsite\Taxonomy\TagSearchInterface;
interface PageRepositoryInterface {
/**
* Searches for archived (page) nodes by given criteria
*
* @param SearchInterface $search
* @return Page[]
*/
public function findArchived(SearchInterface $search);
/**
* Finds last updated pages within $parentPageId
*
* @param PageId $parentPageId
* @param int $offset
* @param int $limit
* @return Page[]
*/
public function findLastUpdated(PageId $parentPageId, $offset = 0, $limit = 1);
/**
* Finds all pages with tags
*
* @return Page[]
*/
public function findAllWithTags();
/**
* @param TagSearchInterface $tagSearch
* @return Page[]
*/
public function findAllByTag(TagSearchInterface $tagSearch);
/**
* @param SitemapConfiguration $configuration
* @return Page[]
*/
public function findForSitemapXml(SitemapConfiguration $configuration);
/**
* @param RssFeedConfigurationInterface $configuration
* @return Page[]
*/
public function findAllForRssFeed(RssFeedConfigurationInterface $configuration);
}
|
Fix UnicodeDecodeError on the environments where the default encoding is ascii
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import re
import six
BASIC_LATIN_RE = re.compile(r'[\u0021-\u007E]')
WHITESPACE_RE = re.compile("[\s]+", re.UNICODE)
def force_text(value):
if isinstance(value, six.text_type):
return value
elif isinstance(value, six.string_types):
return six.b(value).decode('utf-8')
else:
value = str(value)
return value if isinstance(value, six.text_type) else value.decode('utf-8')
def basic_latin_to_fullwidth(value):
"""
基本ラテン文字を全角に変換する
U+0021..U+007FはU+FF01..U+FF5Eに対応しているので
コードポイントに差分の0xFEE0を足す
"""
_value = value.replace(' ', '\u3000')
return BASIC_LATIN_RE.sub(lambda x: unichr(ord(x.group(0)) + 0xFEE0), _value)
def aggregate_whitespace(value):
return ' '.join(WHITESPACE_RE.split(value))
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import re
import six
BASIC_LATIN_RE = re.compile(r'[\u0021-\u007E]')
WHITESPACE_RE = re.compile("[\s]+", re.UNICODE)
def force_text(value):
if isinstance(value, six.text_type):
return value
elif isinstance(value, six.string_types):
return six.b(value).decode()
else:
value = str(value)
return value if isinstance(value, six.text_type) else value.decode()
def basic_latin_to_fullwidth(value):
"""
基本ラテン文字を全角に変換する
U+0021..U+007FはU+FF01..U+FF5Eに対応しているので
コードポイントに差分の0xFEE0を足す
"""
_value = value.replace(' ', '\u3000')
return BASIC_LATIN_RE.sub(lambda x: unichr(ord(x.group(0)) + 0xFEE0), _value)
def aggregate_whitespace(value):
return ' '.join(WHITESPACE_RE.split(value))
|
Add key to list item
|
import React, { PropTypes } from 'react';
import './index.css';
const SearchResults = React.createClass({
propTypes: {
displayOptions: PropTypes.object.isRequired,
},
render() {
const { options, selectionIndex, onOptionSelected } = this.props;
return (
<div className="SearchResults">
<ul className="SearchResults-list">
{options.map((option, index) => {
const classNames = ['SearchResults-listItem'];
if (selectionIndex === index) {
classNames.push('SearchResults-listItem__active');
}
return (
<li
className={classNames.join(' ')}
key={option.id}
onClick={() => { onOptionSelected(option); }}
>
{option.name}
{option.type}
</li>);
})}
</ul>
</div>
);
},
});
export default SearchResults;
|
import React, { PropTypes } from 'react';
import './index.css';
const SearchResults = React.createClass({
propTypes: {
displayOptions: PropTypes.object.isRequired,
},
render() {
const { options, selectionIndex, onOptionSelected } = this.props;
return (
<div className="SearchResults">
<ul className="SearchResults-list">
{options.map((option, index) => {
const classNames = ['SearchResults-listItem'];
if (selectionIndex === index) {
classNames.push('SearchResults-listItem__active');
}
return (
<li className={classNames.join(' ')} onClick={() => { onOptionSelected(option); }}>
{option.name}
{option.type}
</li>);
})}
</ul>
</div>
);
},
});
export default SearchResults;
|
Fix incorrect class name in front page
|
/*
Javascript functions for Chalmers.it
-------------------------------------------- */
/**
* Quick and dirty jQuery animation helper plugin
*
* Usage:
* $("#element").jb_animate("<animation name>")
*
* The single parameter, animation name, should be defined
* as a CSS class. The class is applied to the element. The class
* should include a CSS animation. Example:
*
* .shake {
* -webkit-animation: shake 1s ease-out;
* }
*/
$.fn.jb_animate = function(animation) {
return this.each(function() {
$(this).addClass(animation)
.on("webkitAnimationEnd animationend", function() {
$(this).removeClass(animation);
})
});
};
$(function() {
/* Front page functions */
// Shake the login form when clicking the 'Log in' button
$(".home #login-btn").on("click", function(evt) {
evt.preventDefault();
$(".user-area form")
.jb_animate("shake")
.find("input[type='text']:first")
.focus();
});
});
|
/*
Javascript functions for Chalmers.it
-------------------------------------------- */
/**
* Quick and dirty jQuery animation helper plugin
*
* Usage:
* $("#element").jb_animate("<animation name>")
*
* The single parameter, animation name, should be defined
* as a CSS class. The class is applied to the element. The class
* should include a CSS animation. Example:
*
* .shake {
* -webkit-animation: shake 1s ease-out;
* }
*/
$.fn.jb_animate = function(animation) {
return this.each(function() {
$(this).addClass(animation)
.on("webkitAnimationEnd animationend", function() {
$(this).removeClass(animation);
})
});
};
$(function() {
/* Front page functions */
// Shake the login form when clicking the 'Log in' button
$(".home #login-btn").on("click", function(evt) {
evt.preventDefault();
$(".login form")
.jb_animate("shake")
.find("input[type='text']:first")
.focus();
});
});
|
Add some JavaDoc on the search service.
|
/*
* #%L
* Course Signup API
* %%
* Copyright (C) 2010 - 2013 University of Oxford
* %%
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://opensource.org/licenses/ecl2
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package uk.ac.ox.oucs.vle;
/**
* This service handles the updating of documents in the solr server and the querying of that solr server.
*/
public interface SearchService {
/**
* Perform a solr search.
* @param query The parameters to pass to solr server.
* @return A results wrapper, the calling client must call disconnect on this once it's finished with it.
*/
public ResultsWrapper select(String query);
public void addCourseGroup(CourseGroup group);
public void deleteCourseGroup(CourseGroup group);
public void deleteAll();
public void tidyUp();
}
|
/*
* #%L
* Course Signup API
* %%
* Copyright (C) 2010 - 2013 University of Oxford
* %%
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://opensource.org/licenses/ecl2
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package uk.ac.ox.oucs.vle;
import java.io.InputStream;
import java.util.Map;
public interface SearchService {
public ResultsWrapper select(String query);
public void addCourseGroup(CourseGroup group);
public void deleteCourseGroup(CourseGroup group);
public void deleteAll();
public void tidyUp();
}
|
Use the method in test loot tables now
|
package info.u_team.u_team_test.data.provider;
import java.util.function.BiConsumer;
import info.u_team.u_team_core.data.*;
import info.u_team.u_team_test.init.*;
import net.minecraft.util.ResourceLocation;
import net.minecraft.world.storage.loot.LootTable;
public class TestLootTablesProvider extends CommonLootTablesProvider {
public TestLootTablesProvider(GenerationData data) {
super(data);
}
@Override
protected void registerLootTables(BiConsumer<ResourceLocation, LootTable> consumer) {
registerBlock(TestBlocks.BASIC, addFortuneBlockLootTable(TestBlocks.BASIC.get(), TestItems.BASIC.get()), consumer);
registerBlock(TestBlocks.BASIC_TILEENTITY, addTileEntityBlockLootTable(TestBlocks.BASIC_TILEENTITY.get()), consumer);
registerBlock(TestBlocks.BASIC_ENERGY_CREATOR, addTileEntityBlockLootTable(TestBlocks.BASIC_ENERGY_CREATOR.get()), consumer);
registerBlock(TestBlocks.BASIC_FLUID_INVENTORY, addTileEntityBlockLootTable(TestBlocks.BASIC_FLUID_INVENTORY.get()), consumer);
}
}
|
package info.u_team.u_team_test.data.provider;
import java.util.function.BiConsumer;
import info.u_team.u_team_core.data.*;
import info.u_team.u_team_test.init.*;
import net.minecraft.util.ResourceLocation;
import net.minecraft.world.storage.loot.LootTable;
public class TestLootTablesProvider extends CommonLootTablesProvider {
public TestLootTablesProvider(GenerationData data) {
super(data);
}
@Override
protected void registerLootTables(BiConsumer<ResourceLocation, LootTable> consumer) {
registerBlock(TestBlocks.BASIC.get(), addFortuneBlockLootTable(TestBlocks.BASIC.get(), TestItems.BASIC.get()), consumer);
registerBlock(TestBlocks.BASIC_TILEENTITY.get(), addTileEntityBlockLootTable(TestBlocks.BASIC_TILEENTITY.get()), consumer);
registerBlock(TestBlocks.BASIC_ENERGY_CREATOR.get(), addTileEntityBlockLootTable(TestBlocks.BASIC_ENERGY_CREATOR.get()), consumer);
registerBlock(TestBlocks.BASIC_FLUID_INVENTORY.get(), addTileEntityBlockLootTable(TestBlocks.BASIC_FLUID_INVENTORY.get()), consumer);
}
}
|
Add more specific installation dependencies
|
#!/usr/bin/env python
import unittest
from setuptools import setup
def pccora_test_suite():
test_loader = unittest.TestLoader()
test_suite = test_loader.discover('tests', pattern='test_*.py')
return test_suite
test_requirements = []
install_requires = [
'construct==2.5.1'
]
extras_require = {
'scripts_linux': [
'jinja2',
'netcdf4',
'numpy',
'pytz'
],
'scripts_win32': [
'plac'
]
}
setup(name='pccora',
version='0.3',
description='PC-CORA sounding data files parser for Python',
url='http://github.com/niwa/pccora',
author='Bruno P. Kinoshita',
author_email='brunodepaulak@yahoo.com.br',
license='MIT',
keywords=['sounding file', 'radiosonde', 'vaisala', 'pccora', 'atmosphere'],
packages=['pccora'],
zip_safe=False,
test_suite='setup.pccora_test_suite',
tests_require=test_requirements,
install_requires=install_requires,
extras_require=extras_require
)
|
#!/usr/bin/env python
import unittest
from setuptools import setup
def pccora_test_suite():
test_loader = unittest.TestLoader()
test_suite = test_loader.discover('tests', pattern='test_*.py')
return test_suite
test_requirements = []
install_requires = [
'construct==2.5.1'
]
extras_require = {
'scripts': [
'numpy',
'netcdf4'
]
}
setup(name='pccora',
version='0.3',
description='PC-CORA sounding data files parser for Python',
url='http://github.com/niwa/pccora',
author='Bruno P. Kinoshita',
author_email='brunodepaulak@yahoo.com.br',
license='MIT',
keywords=['sounding file', 'radiosonde', 'vaisala', 'pccora', 'atmosphere'],
packages=['pccora'],
zip_safe=False,
tests_require=test_requirements,
test_suite='setup.pccora_test_suite',
install_requires=[
'construct==2.5.1'
] + test_requirements,
extras_require=extras_require
)
|
Replace double quotes with single quotes.
|
require.config({
paths: {
text: 'lib/require-text',
css: 'lib/require-css',
jquery: 'lib/jquery',
underscore: 'lib/underscore',
'underscore.crunch': 'lib/underscore.crunch',
backbone: 'lib/backbone',
marked: 'lib/marked',
'marked.highlight': 'lib/marked.highlight',
moment: 'lib/moment',
highlight: 'lib/highlight',
'disqus.embed': '//livingwithdjango.disqus.com/embed',
'disqus.count': '//livingwithdjango.disqus.com/count'
},
shim: {
highlight: {
deps: ['css!style/highlight.css'],
exports: 'hljs'
}
}
});
require(
[
'backbone', 'models', 'views', 'css!style/main.css'
], function(B, M, V) {
new (B.Router.extend({
routes: {'': 'list', '!/': 'list', '!/*src': 'entry'},
list: function() { new V.List(); },
entry: function(src) { new V.Entry({model: new M.Entry({src: src})}); }
}));
B.history.start({root: '/living-with-django/'});
}
);
|
require.config({
paths: {
text: 'lib/require-text',
css: 'lib/require-css',
jquery: 'lib/jquery',
underscore: 'lib/underscore',
'underscore.crunch': 'lib/underscore.crunch',
backbone: 'lib/backbone',
marked: 'lib/marked',
'marked.highlight': 'lib/marked.highlight',
moment: 'lib/moment',
highlight: 'lib/highlight',
"disqus.embed": '//livingwithdjango.disqus.com/embed',
"disqus.count": '//livingwithdjango.disqus.com/count'
},
shim: {
highlight: {
deps: ['css!style/highlight.css'],
exports: 'hljs'
}
}
});
require(
[
'backbone', 'models', 'views', 'css!style/main.css'
], function(B, M, V) {
new (B.Router.extend({
routes: {'': 'list', '!/': 'list', '!/*src': 'entry'},
list: function() { new V.List(); },
entry: function(src) { new V.Entry({model: new M.Entry({src: src})}); }
}));
B.history.start({root: '/living-with-django/'});
}
);
|
Fix event listener redundancy on machine "status" event.
|
var fs = require('fs');
var machine = require('../machine').machine;
var log=require('../log').logger("websocket");
var socketio = require('socket.io');
var clients_limit = 3;
var nb_clients=0;
function broadcast_status_report(clients_sockets){
machine.on('status',function(status){
clients_sockets.emit('status',status);
});
}
connect = function(socket) {
nb_clients++;
if (nb_clients<=clients_limit){ // avoid too many connection on the app.
socket_main(socket);
}
socket.on('disconnect', function() {
socket_close(socket);
nb_clients--;
});
};
function socket_main(socket){
log.debug("client connected");
}
function socket_close(socket){
log.debug("client disconnected");
}
module.exports = function(server) {
server.io.on('connection', connect);
broadcast_status_report(server.io.sockets);
};
|
var fs = require('fs');
var machine = require('../machine').machine;
var log=require('../log').logger("websocket");
var socketio = require('socket.io');
var clients_limit = 1;
var nb_clients=0;
function bind_status_report(socket){
machine.on('status',function(status){
socket.broadcast.emit('status',status);
});
}
connect = function(socket) {
log.debug("new client by websocket");
nb_clients++;
if (nb_clients<=clients_limit){ // avoid too many connection on the app.
socket_main(socket);
}
socket.on('disconnect', function() {
log.debug("client disconnected");
nb_clients--;
});
};
function socket_main(socket){
bind_status_report(socket);
}
module.exports = function(server) {
server.io.on('connection', connect);
};
|
Improve source maps for JS
|
import path from 'path';
import webpack from 'webpack';
import CommonsChunkPlugin from 'webpack/lib/optimize/CommonsChunkPlugin';
const PROD = process.env.NODE_ENV || 0;
module.exports = {
devtool: PROD ? false : 'eval-cheap-module-source-map',
entry: {
app: './app/assets/scripts/App.js',
vendor: [
'picturefill',
'./app/assets/_compiled/modernizr'
]
},
output: {
path: __dirname + '/app/assets/_compiled',
publicPath: '/assets/_compiled/',
filename: '[name].js',
chunkFilename: '_chunk/[name]_[chunkhash].js'
},
plugins: [
new webpack.optimize.UglifyJsPlugin({
compress: PROD ? true : false,
output: {
comments: false
}
}),
new CommonsChunkPlugin({
children: true,
minChunks: 2
})
],
module: {
loaders: [
{
test: /\.js$/,
exclude: /node_modules/,
loader: 'babel-loader'
}
]
}
};
|
import path from 'path';
import webpack from 'webpack';
import CommonsChunkPlugin from 'webpack/lib/optimize/CommonsChunkPlugin';
const PROD = process.env.NODE_ENV || 0;
module.exports = {
devtool: PROD ? false : 'eval',
entry: {
app: './app/assets/scripts/App.js',
vendor: [
'picturefill',
'./app/assets/_compiled/modernizr'
]
},
output: {
path: __dirname + '/app/assets/_compiled',
publicPath: '/assets/_compiled/',
filename: '[name].js',
chunkFilename: '_chunk/[name]_[chunkhash].js'
},
plugins: [
new webpack.optimize.UglifyJsPlugin({
compress: PROD ? true : false,
output: {
comments: false
}
}),
new CommonsChunkPlugin({
children: true,
minChunks: 2
})
],
module: {
loaders: [
{
test: /\.js$/,
exclude: /node_modules/,
loader: 'babel-loader'
}
]
}
};
|
Add log line when we successfully created the socket
|
package main
import (
"net"
"os/exec"
"strings"
)
var (
socketName = "/tmp/dinit.sock"
socketMaxLen = 512
)
func startCommand(c net.Conn) {
buf := make([]byte, socketMaxLen)
n, err := c.Read(buf)
defer c.Close()
if err != nil {
logPrintf("socket: error reading data: %s", err)
return
}
cmdargs := strings.Fields(string(buf[0:n]))
commands := Args(cmdargs)
run(commands, true)
}
func socket() {
l, err := net.Listen("unix", socketName)
if err != nil {
logFatalf("socket: listen error: %s", err)
}
logPrintf("socket: successfully created")
for {
fd, err := l.Accept()
if err != nil {
logPrintf("socket: accept error: %s", err)
continue
}
go startCommand(fd)
}
}
func write(cmds []*exec.Cmd) error {
c, err := net.Dial("unix", socketName)
if err != nil {
return err
}
str := String(cmds)
_, err = c.Write([]byte(str))
if err != nil {
return err
}
return nil
}
|
package main
import (
"net"
"os/exec"
"strings"
)
var (
socketName = "/tmp/dinit.sock"
socketMaxLen = 512
)
func startCommand(c net.Conn) {
buf := make([]byte, socketMaxLen)
n, err := c.Read(buf)
defer c.Close()
if err != nil {
logPrintf("socket: error reading data: %s", err)
return
}
cmdargs := strings.Fields(string(buf[0:n]))
commands := Args(cmdargs)
run(commands, true)
}
func socket() {
l, err := net.Listen("unix", socketName)
if err != nil {
logFatalf("socket: listen error: %s", err)
}
for {
fd, err := l.Accept()
if err != nil {
logPrintf("socket: accept error: %s", err)
continue
}
go startCommand(fd)
}
}
func write(cmds []*exec.Cmd) error {
c, err := net.Dial("unix", socketName)
if err != nil {
return err
}
str := String(cmds)
_, err = c.Write([]byte(str))
if err != nil {
return err
}
return nil
}
|
Add reloadItems to masonry update function
|
function updateMasonry() {
var container = $('.masonry-row').masonry({
itemSelector : '.masonry-item',
columnWidth: '.grid-sizer',
percentPosition: true
});
container.imagesLoaded(function () {
container.masonry();
});
container.masonry('reloadItems');
container.masonry('layout');
}
$(window).load(function () {
updateMasonry();
});
$(".input-group input").change(function () {
console.log("change");
$(this).removeClass("is-not-empty");
if ($(this).val() == "") {
} else {
$(this).addClass("is-not-empty");
}
});
$(".sidenav-toggle").click(function () {
if ($("body").hasClass("sidebar-active")) {
$("body").removeClass("sidebar-active");
} else {
$("body").addClass("sidebar-active");
}
window.setTimeout(updateMasonry, 300);
});
|
function updateMasonry() {
var $container = $('.row').masonry();
$container.imagesLoaded(function () {
$container.masonry();
});
}
$(window).load(function () {
updateMasonry();
});
$(".input-group input").change(function () {
console.log("change");
$(this).removeClass("is-not-empty");
if ($(this).val() == "") {
} else {
$(this).addClass("is-not-empty");
}
});
$(".sidenav-toggle").click(function () {
if ($("body").hasClass("sidebar-active")) {
$("body").removeClass("sidebar-active");
} else {
$("body").addClass("sidebar-active");
}
window.setTimeout(updateMasonry, 300);
});
|
Change copyright in Apache 2 license to 2013
|
/**
* Copyright © 2011-2013 EMBL - European Bioinformatics Institute
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.ac.ebi.phenotype.dao;
/**
*
* Organisation (institute, company) data access manager interface.
*
* @author Gautier Koscielny (EMBL-EBI) <koscieln@ebi.ac.uk>
* @since February 2012
*/
import java.util.List;
import uk.ac.ebi.phenotype.pojo.Organisation;
public interface OrganisationDAO {
/**
* Get all organisations
* @return all organisations
*/
public List<Organisation> getAllOrganisations();
/**
* Find an organisation by its name.
* @param name the organisation name
* @return the organisation
*/
public Organisation getOrganisationByName(String name);
}
|
/**
* Copyright © 2011-2012 EMBL - European Bioinformatics Institute
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.ac.ebi.phenotype.dao;
/**
*
* Organisation (institute, company) data access manager interface.
*
* @author Gautier Koscielny (EMBL-EBI) <koscieln@ebi.ac.uk>
* @since February 2012
*/
import java.util.List;
import uk.ac.ebi.phenotype.pojo.Organisation;
public interface OrganisationDAO {
/**
* Get all organisations
* @return all organisations
*/
public List<Organisation> getAllOrganisations();
/**
* Find an organisation by its name.
* @param name the organisation name
* @return the organisation
*/
public Organisation getOrganisationByName(String name);
}
|
Fix formatting of if statement
|
/*
* Copyright (c) IBM Corporation (2009). All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.osgi.test.cases.jndi.provider;
import java.util.Hashtable;
import javax.naming.NamingException;
import javax.naming.spi.ObjectFactory;
import org.osgi.test.cases.jndi.provider.CTObjectFactory;
/**
* @version $Revision$ $Date$
*/
public class CTObjectFactoryBuilder implements javax.naming.spi.ObjectFactoryBuilder {
public ObjectFactory createObjectFactory(Object obj, Hashtable var1)
throws NamingException {
if (obj instanceof CTReference) {
return new CTObjectFactory();
} else {
return null;
}
}
}
|
/*
* Copyright (c) IBM Corporation (2009). All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.osgi.test.cases.jndi.provider;
import java.util.Hashtable;
import javax.naming.NamingException;
import javax.naming.spi.ObjectFactory;
import org.osgi.test.cases.jndi.provider.CTObjectFactory;
/**
* @version $Revision$ $Date$
*/
public class CTObjectFactoryBuilder implements javax.naming.spi.ObjectFactoryBuilder {
public ObjectFactory createObjectFactory(Object obj, Hashtable var1)
throws NamingException {
if (obj instanceof CTReference) {
return new CTObjectFactory();
} else {
return null;
}
}
}
|
Change user role_id to nullable and remove index
|
<?php
use Illuminate\Database\Migrations\Migration;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Support\Facades\Schema;
class AddUserRole extends Migration
{
/**
* Run the migrations.
*
* @return void
*/
public function up()
{
Schema::table('users', function (Blueprint $table) {
$table->integer('role_id')->nullable();
});
}
/**
* Reverse the migrations.
*
* @return void
*/
public function down()
{
Schema::table('users', function (Blueprint $table) {
$table->dropColumn('role_id');
});
}
}
|
<?php
use Illuminate\Database\Migrations\Migration;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Support\Facades\Schema;
class AddUserRole extends Migration
{
/**
* Run the migrations.
*
* @return void
*/
public function up()
{
Schema::table('users', function (Blueprint $table) {
$table->integer('role_id')->default(0)->index();
});
}
/**
* Reverse the migrations.
*
* @return void
*/
public function down()
{
Schema::table('users', function (Blueprint $table) {
$table->dropColumn('role_id');
});
}
}
|
Add img_path field to Incidents table
|
<?php
use Illuminate\Support\Facades\Schema;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Database\Migrations\Migration;
class CreateIncidentsTable extends Migration
{
/**
* Run the migrations.
*
* @return void
*/
public function up()
{
Schema::create('incidents', function (Blueprint $table) {
$table->increments('id');
$table->date('date');
$table->string('title');
$table->text('description');
$table->string('img_path')->nullable();
$table->timestamps();
});
}
/**
* Reverse the migrations.
*
* @return void
*/
public function down()
{
Schema::dropIfExists('incidents');
}
}
|
<?php
use Illuminate\Support\Facades\Schema;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Database\Migrations\Migration;
class CreateIncidentsTable extends Migration
{
/**
* Run the migrations.
*
* @return void
*/
public function up()
{
Schema::create('incidents', function (Blueprint $table) {
$table->increments('id');
$table->date('date');
$table->string('title');
$table->text('description');
$table->timestamps();
});
}
/**
* Reverse the migrations.
*
* @return void
*/
public function down()
{
Schema::dropIfExists('incidents');
}
}
|
Implement a __getattr__() for mock_wrapper that just returns a lambda that records whatever call was attempted along with the call params.
|
'''Unit test package for package "tws".'''
__copyright__ = "Copyright (c) 2008 Kevin J Bluck"
__version__ = "$Id$"
import socket
from tws import EWrapper
def test_import():
'''Verify successful import of top-level "tws" package'''
import tws
assert tws
class mock_wrapper(EWrapper):
def __init__(self):
self.calldata = []
self.errors = []
def error(self, id, code, text):
self.errors.append((id, code, text))
def __getattr__(self, name):
# Any arbitrary unknown attribute is mapped to a function call which is
# recorded into self.calldata.
return lambda *args, **kwds: self.calldata.append((name, args, kwds))
class mock_socket(object):
def __init__(self):
self._peer = ()
def connect(self, peer, error=False):
if error: raise socket.error()
self._peer = peer
def getpeername(self):
if not self._peer: raise socket.error()
return self._peer
def makefile(self, mode):
return StringIO()
|
'''Unit test package for package "tws".'''
__copyright__ = "Copyright (c) 2008 Kevin J Bluck"
__version__ = "$Id$"
import socket
from tws import EWrapper
def test_import():
'''Verify successful import of top-level "tws" package'''
import tws
assert tws
class mock_wrapper(EWrapper):
def __init__(self):
self.errors = []
def error(self, id, code, text):
self.errors.append((id, code, text))
class mock_socket(object):
def __init__(self):
self._peer = ()
def connect(self, peer, error=False):
if error: raise socket.error()
self._peer = peer
def getpeername(self):
if not self._peer: raise socket.error()
return self._peer
def makefile(self, mode):
return StringIO()
|
Add method to specify required checks
|
<?php
namespace PhpSchool\PhpWorkshop\Exercise;
use PhpSchool\PhpWorkshop\ExerciseDispatcher;
use PhpSchool\PhpWorkshop\Solution\SolutionInterface;
/**
* Class ExerciseInterface
* @package PhpSchool\PhpWorkshop\Exercise
* @author Aydin Hassan <aydin@hotmail.co.uk>
*/
interface ExerciseInterface
{
/**
* @return string
*/
public function getName();
/**
* @return ExerciseType
*/
public function getType();
/**
* @param ExerciseDispatcher $dispatcher
*/
public function configure(ExerciseDispatcher $dispatcher);
/**
* @return string
*/
public function getDescription();
/**
* @return SolutionInterface
*/
public function getSolution();
/**
* @return string
*/
public function getProblem();
/**
* @return void
*/
public function tearDown();
}
|
<?php
namespace PhpSchool\PhpWorkshop\Exercise;
use PhpSchool\PhpWorkshop\Solution\SolutionInterface;
/**
* Class ExerciseInterface
* @package PhpSchool\PhpWorkshop\Exercise
* @author Aydin Hassan <aydin@hotmail.co.uk>
*/
interface ExerciseInterface
{
/**
* @return string
*/
public function getName();
/**
* @return ExerciseType
*/
public function getType();
/**
* @return string
*/
public function getDescription();
/**
* @return SolutionInterface
*/
public function getSolution();
/**
* @return string
*/
public function getProblem();
/**
* @return void
*/
public function tearDown();
}
|
Make filter implement writable interface.
|
<?php
namespace UWDOEM\Framework\Filter;
use Guzzle\Service\Resource\Model;
use Propel\Runtime\ActiveQuery\ModelCriteria;
use UWDOEM\Framework\FilterStatement\FilterStatementInterface;
use UWDOEM\Framework\Writer\WritableInterface;
interface FilterInterface extends WritableInterface {
/**
* @param FilterInterface $filter
*/
function combine(FilterInterface $filter);
/**
* @return string[]
*/
function getFeedback();
/**
* @return string
*/
function getHandle();
/**
* @return FilterInterface
*/
function getNextFilter();
/**
* @return FilterStatementInterface[]
*/
function getStatements();
/**
* @return array
*/
function getOptions();
/**
* @param ModelCriteria $query
* @return ModelCriteria
*/
function queryFilter(ModelCriteria $query);
/**
* @param \UWDOEM\Framework\Row\Row[] $rows
* @return \UWDOEM\Framework\Row\Row[]
*/
function rowFilter(array $rows);
}
|
<?php
namespace UWDOEM\Framework\Filter;
use Guzzle\Service\Resource\Model;
use Propel\Runtime\ActiveQuery\ModelCriteria;
use UWDOEM\Framework\FilterStatement\FilterStatementInterface;
interface FilterInterface {
/**
* @param FilterInterface $filter
*/
function combine(FilterInterface $filter);
/**
* @return string[]
*/
function getFeedback();
/**
* @return string
*/
function getHandle();
/**
* @return FilterInterface
*/
function getNextFilter();
/**
* @return FilterStatementInterface[]
*/
function getStatements();
/**
* @return array
*/
function getOptions();
/**
* @param ModelCriteria $query
* @return ModelCriteria
*/
function queryFilter(ModelCriteria $query);
/**
* @param \UWDOEM\Framework\Row\Row[] $rows
* @return \UWDOEM\Framework\Row\Row[]
*/
function rowFilter(array $rows);
}
|
Fix issue with relative stylesheets across files.
Fixes #13.
|
'use strict';
var gutil = require('gulp-util'),
through = require('through2'),
juice = require('juice2');
module.exports = function(opt){
return through.obj(function (file, enc, cb) {
var _opt = JSON.parse(JSON.stringify(opt || {}));
// 'url' option is required
// set it automatically if not provided
if (!_opt.url) {
_opt.url = 'file://' + file.path;
}
if (file.isStream()) {
this.emit('error', new gutil.PluginError('gulp-inline-css', 'Streaming not supported'));
return cb();
}
juice.juiceContent(file.contents, _opt, function(err, html) {
if (err) {
this.emit('error', new gutil.PluginError('gulp-inline-css', err));
}
file.contents = new Buffer(String(html));
this.push(file);
return cb();
}.bind(this));
});
};
|
'use strict';
var gutil = require('gulp-util');
var through = require('through2');
var juice = require('juice2');
module.exports = function(opt){
return through.obj(function (file, enc, cb) {
opt = opt || {};
// 'url' option is required
// set it automatically if not provided
opt.url = opt.url || 'file://' + file.path;
if (file.isStream()) {
this.emit('error', new gutil.PluginError('gulp-inline-css', 'Streaming not supported'));
return cb();
}
juice.juiceContent(file.contents, opt, function(err, html) {
if (err) {
this.emit('error', new gutil.PluginError('gulp-inline-css', err));
}
file.contents = new Buffer(String(html));
this.push(file);
return cb();
}.bind(this));
});
};
|
Check that first row contains th
Because the first child of our `<tr>` could be a text node, or maybe coder wants to intermix `<td>` with `<th>`, need to check more than just the first child for our `<th>`.
|
var Issue = require('../issue');
module.exports = {
name: 'table-req-header',
on: ['tag'],
filter: ['table'],
desc: [
'If set, each `table` tag must contain a header: a `thead` tag',
'or a `tr` tag with a `th` child.'
].join('\n')
};
module.exports.lint = function (ele, opts) {
var children = ele.children,
childIndex = 0,
child;
//ffwd to first relevant table child
while ((child = children[childIndex]) &&
(
child.name === undefined || // skip text nodes
(child.name && child.name.match(/(caption|colgroup)/i))
)
) {
childIndex++;
}
if (child && child.name && child.name.match(/thead/i)) {
return [];
}
if (child && child.name && child.name.match(/tr/i)) {
// Check if any child in first row is `<th>`, not just first child (which could be a text node)
for (var i=0, l=child.children.length; i<l; i++) {
if (child.children[i].name && child.children[i].name == 'th') {
return []
}
}
}
return new Issue('E035', ele.openLineCol);
};
|
var Issue = require('../issue');
module.exports = {
name: 'table-req-header',
on: ['tag'],
filter: ['table'],
desc: [
'If set, each `table` tag must contain a header: a `thead` tag',
'or a `tr` tag with a `th` child.'
].join('\n')
};
module.exports.lint = function (ele, opts) {
var children = ele.children,
childIndex = 0,
child;
//ffwd to first relevant table child
while ((child = children[childIndex]) &&
(
child.name === undefined || // skip text nodes
(child.name && child.name.match(/(caption|colgroup)/i))
)
) {
childIndex++;
}
if (child && child.name &&
((child.name.match(/thead/i)) ||
(child.name.match(/tr/i) && child.children[0].name.match(/th/i)))
) {
return [];
}
return new Issue('E035', ele.openLineCol);
};
|
Refactor SkiDaysCount as a stateless functional component.
|
import '../stylesheets/ui.scss'
const percentToDecimal = (decimal) => {
return ((decimal * 100) + '%')
}
const calcGoalProgress = (total, goal) => {
return percentToDecimal(total/goal)
}
export const SkiDayCount = ({total, powder, backcountry, goal}) => (
<div className="ski-day-count">
<div className="total-days">
<span>{total}</span>
<span>days</span>
</div>
<div className="powder-days">
<span>{powder}</span>
<span>days</span>
</div>
<div className="backcountry-days">
<span>{backcountry}</span>
<span>days</span>
</div>
<div>
<span>
{calcGoalProgress(
total,
goal
)}
</span>
</div>
</div>
)
|
import { Component } from 'react'
import '../stylesheets/ui.scss'
export class SkiDayCount extends Component {
percentToDecimal(decimal) {
return ((decimal * 100) + '%')
}
calcGoalProgress(total, goal) {
return this.percentToDecimal(total/goal)
}
render() {
return (
<div className="ski-day-count">
<div className="total-days">
<span>{this.props.total}</span>
<span>days</span>
</div>
<div className="powder-days">
<span>{this.props.powder}</span>
<span>days</span>
</div>
<div className="backcountry-days">
<span>{this.props.backcountry}</span>
<span>days</span>
</div>
<div>
<span>
{this.calcGoalProgress(
this.props.total,
this.props.goal
)}
</span>
</div>
</div>
)
}
}
|
Update dsub version to 0.4.0
PiperOrigin-RevId: 328430334
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.0'
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.11.dev0'
|
Use interface instead of implementation class
|
/*
* Copyright 2015-2018 the original author or authors.
*
* All rights reserved. This program and the accompanying materials are
* made available under the terms of the Eclipse Public License v2.0 which
* accompanies this distribution and is available at
*
* http://www.eclipse.org/legal/epl-v20.html
*/
package example;
// tag::user_guide[]
import java.util.HashMap;
import java.util.Map;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestReporter;
class TestReporterDemo {
@Test
void reportSingleValue(TestReporter testReporter) {
testReporter.publishEntry("a key", "a value");
}
@Test
void reportSeveralValues(TestReporter testReporter) {
Map<String, String> values = new HashMap<>();
values.put("user name", "dk38");
values.put("award year", "1974");
testReporter.publishEntry(values);
}
}
// end::user_guide[]
|
/*
* Copyright 2015-2018 the original author or authors.
*
* All rights reserved. This program and the accompanying materials are
* made available under the terms of the Eclipse Public License v2.0 which
* accompanies this distribution and is available at
*
* http://www.eclipse.org/legal/epl-v20.html
*/
package example;
// tag::user_guide[]
import java.util.HashMap;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestReporter;
class TestReporterDemo {
@Test
void reportSingleValue(TestReporter testReporter) {
testReporter.publishEntry("a key", "a value");
}
@Test
void reportSeveralValues(TestReporter testReporter) {
HashMap<String, String> values = new HashMap<>();
values.put("user name", "dk38");
values.put("award year", "1974");
testReporter.publishEntry(values);
}
}
// end::user_guide[]
|
Fix for sometimes this unit test failing
|
package org.threadly.concurrent;
import static org.junit.Assert.*;
import org.junit.Before;
import org.junit.Test;
import org.threadly.test.TestUtil;
import org.threadly.util.Clock;
public class ClockWrapperTest {
@Before
public void setup() {
Clock.stopClockUpdateThread();
}
@Test
public void getAccurateTimeTest() {
long startTime = ClockWrapper.getAccurateTime();
TestUtil.sleep(10);
long updateTime;
// verify getting updates
assertTrue(startTime != (updateTime = ClockWrapper.getAccurateTime()));
// request stop to updates
ClockWrapper.stopForcingUpdate();
TestUtil.sleep(10);
// verify no longer getting updates
assertEquals(updateTime, ClockWrapper.getAccurateTime());
// allow updates again
ClockWrapper.resumeForcingUpdate();
assertTrue(updateTime != ClockWrapper.getAccurateTime());
}
@Test
public void getLastKnownTimeAndUpdateTest() {
long originalTime;
assertEquals(originalTime = Clock.lastKnownTimeMillis(), ClockWrapper.getLastKnownTime());
TestUtil.sleep(10);
long updateTime = ClockWrapper.updateClock();
assertTrue(originalTime != updateTime);
assertEquals(Clock.lastKnownTimeMillis(), ClockWrapper.getLastKnownTime());
assertEquals(updateTime, ClockWrapper.getLastKnownTime());
}
}
|
package org.threadly.concurrent;
import static org.junit.Assert.*;
import org.junit.Before;
import org.junit.Test;
import org.threadly.test.TestUtil;
import org.threadly.util.Clock;
public class ClockWrapperTest {
@Before
public void setup() {
Clock.stopClockUpdateThread();
}
@Test
public void getAccurateTimeTest() {
long startTime = ClockWrapper.getAccurateTime();
TestUtil.sleep(10);
long updateTime;
// verify getting updates
assertTrue(startTime != (updateTime = ClockWrapper.getAccurateTime()));
// request stop to updates
ClockWrapper.stopForcingUpdate();
TestUtil.sleep(10);
// verify no longer getting updates
assertEquals(updateTime, ClockWrapper.getAccurateTime());
// allow updates again
ClockWrapper.resumeForcingUpdate();
assertTrue(updateTime != ClockWrapper.getAccurateTime());
}
@Test
public void getLastKnownTimeAndUpdateTest() {
long originalTime;
assertEquals(originalTime = Clock.lastKnownTimeMillis(), ClockWrapper.getLastKnownTime());
long updateTime = ClockWrapper.updateClock();
assertTrue(originalTime != updateTime);
assertEquals(Clock.lastKnownTimeMillis(), ClockWrapper.getLastKnownTime());
assertEquals(updateTime, ClockWrapper.getLastKnownTime());
}
}
|
Use next instead of current
|
<?php
declare(strict_types=1);
require_once(__DIR__.'/BingoBoard.php');
$input = file(__DIR__ . '/input.txt', FILE_IGNORE_NEW_LINES | FILE_SKIP_EMPTY_LINES);
$marks = explode(',',next($input));
// Now, we're processing this file without empty lines - which means there are no spaces between boards
// So we don't need to process those empty lines. Each board is 5x5, so after 5, its a new board
/** @var BingoBoard[] $boards */
$boards = [];
do { // Similar to foreach but this is the block that contains 1 board
$boardData = [];
for($row = 0;$row < 5;++$row) {
$boardData[] = array_map(
static function (string $input) {
return (int)trim($input);
},
str_split(next($input), 3)
);
}
$boards[] = new BingoBoard($boardData);
} while (next($input));
// Time to play Bingo.
foreach ($marks as $mark) {
foreach ($boards as $board) {
$board->mark($mark);
$score = $board->getScore();
if ($score > 0) {
echo "Win! Score: {$score}",PHP_EOL;
exit;
}
}
}
|
<?php
declare(strict_types=1);
require_once(__DIR__.'/BingoBoard.php');
$input = file(__DIR__ . '/input.txt', FILE_IGNORE_NEW_LINES | FILE_SKIP_EMPTY_LINES);
$marks = explode(',',current($input));
next($input); // Drive input forward to the boards
// Now, we're processing this file without empty lines - which means there are no spaces between boards
// So we don't need to process those empty lines. Each board is 5x5, so after 5, its a new board
/** @var BingoBoard[] $boards */
$boards = [];
do { // Similar to foreach but this is the block that contains 1 board
$boardData = [];
for($row = 0;$row < 5;++$row) {
$boardData[] = array_map(
static function (string $input) {
return (int)trim($input);
},
str_split(current($input), 3)
);
next($input);
}
$boards[] = new BingoBoard($boardData);
} while (next($input));
// Time to play Bingo.
foreach ($marks as $mark) {
foreach ($boards as $board) {
$board->mark($mark);
$score = $board->getScore();
if ($score > 0) {
echo "Win! Score: {$score}",PHP_EOL;
exit;
}
}
}
|
:hammer: Watch for changes in all SCSS files
|
/**
* Copyright 2021 The AMP HTML Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS-IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
const gulp = require('gulp');
const importDocs = require('./importDocs.js');
const eleventy = require('./eleventy.js');
const lint = require('./lint.js');
const sass = require('./sass.js');
const build = gulp.series(sass, eleventy.build);
const develop = gulp.series(
sass,
gulp.parallel(() => {
gulp.watch('./styles/**/*.scss', sass);
}, eleventy.develop)
);
module.exports = {
importDocs,
lint,
sass,
default: build,
build,
develop,
};
|
/**
* Copyright 2021 The AMP HTML Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS-IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
const gulp = require('gulp');
const importDocs = require('./importDocs.js');
const eleventy = require('./eleventy.js');
const lint = require('./lint.js');
const sass = require('./sass.js');
const build = gulp.series(sass, eleventy.build);
const develop = gulp.series(
sass,
gulp.parallel(() => {
gulp.watch('./styles/bento-dev.scss', sass);
}, eleventy.develop)
);
module.exports = {
importDocs,
lint,
sass,
default: build,
build,
develop,
};
|
Transform errors and warnings to exceptions.
|
<?php
/*!
* This file is part of {@link https://github.com/MovLib MovLib}.
*
* Copyright © 2013-present {@link https://movlib.org/ MovLib}.
*
* MovLib is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public
* License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later
* version.
*
* MovLib is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty
* of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License along with MovLib.
* If not, see {@link http://www.gnu.org/licenses/ gnu.org/licenses}.
*/
/**
* @author Richard Fussenegger <richard@fussenegger.info>
* @copyright © 2013 MovLib
* @license http://www.gnu.org/licenses/agpl.html AGPL-3.0
* @link https://movlib.org/
* @since 0.0.1-dev
*/
require __DIR__ . "/lib/autoload.php";
// Transform all kinds of errors, warnings, etc. to exceptions.
set_error_handler(function ($severity, $message, $file, $line) {
throw new \ErrorException($message, $severity, 0, $file, $line);
});
|
<?php
/*!
* This file is part of {@link https://github.com/MovLib MovLib}.
*
* Copyright © 2013-present {@link https://movlib.org/ MovLib}.
*
* MovLib is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public
* License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later
* version.
*
* MovLib is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty
* of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License along with MovLib.
* If not, see {@link http://www.gnu.org/licenses/ gnu.org/licenses}.
*/
/**
* @author Richard Fussenegger <richard@fussenegger.info>
* @copyright © 2013 MovLib
* @license http://www.gnu.org/licenses/agpl.html AGPL-3.0
* @link https://movlib.org/
* @since 0.0.1-dev
*/
require __DIR__ . "/lib/autoload.php";
|
Apply numeric array conversion to every JSON request as well
|
<?php
/* Copyright (C) NAVER <http://www.navercorp.com> */
class JSONDisplayHandler
{
/**
* Produce JSON compliant content given a module object.\n
* @param ModuleObject $oModule the module object
* @return string
*/
public function toDoc($oModule)
{
$variables = $oModule->getVariables();
$variables['error'] = $oModule->getError();
$variables['message'] = $oModule->getMessage();
$temp = array();
foreach ($variables as $key => $value)
{
if (self::_isNumericArray($value))
{
$temp[$key] = array_values($value);
}
else
{
$temp[$key] = $value;
}
}
return json_encode($temp);
}
/**
* Check if an array only has numeric keys.
*
* @param array $array
* @return bool
*/
protected static function _isNumericArray($array)
{
if (!is_array($array) || !count($array))
{
return false;
}
foreach ($array as $key => $value)
{
if (intval($key) != $key)
{
return false;
}
}
return true;
}
}
/* End of file JSONDisplayHandler.class.php */
/* Location: ./classes/display/JSONDisplayHandler.class.php */
|
<?php
/* Copyright (C) NAVER <http://www.navercorp.com> */
class JSONDisplayHandler
{
/**
* Produce JSON compliant content given a module object.\n
* @param ModuleObject $oModule the module object
* @return string
*/
public function toDoc($oModule)
{
$variables = $oModule->getVariables();
$variables['error'] = $oModule->getError();
$variables['message'] = $oModule->getMessage();
if (Context::getRequestMethod() === 'XMLRPC')
{
$temp = array();
foreach ($variables as $key => $value)
{
if (self::_isNumericArray($value))
{
$temp[$key] = array_values($value);
}
else
{
$temp[$key] = $value;
}
}
$variables = $temp;
}
return json_encode($variables);
}
/**
* Check if an array only has numeric keys.
*
* @param array $array
* @return bool
*/
protected static function _isNumericArray($array)
{
if (!is_array($array) || !count($array))
{
return false;
}
foreach ($array as $key => $value)
{
if (intval($key) != $key)
{
return false;
}
}
return true;
}
}
/* End of file JSONDisplayHandler.class.php */
/* Location: ./classes/display/JSONDisplayHandler.class.php */
|
Update to universal Google Analytics
|
<!-- footer -->
<footer class="footer" role="contentinfo">
<!-- copyright -->
<p class="copyright">
© <?php echo date("Y"); ?> Copyright <?php bloginfo('name'); ?>. <?php _e('Powered by', 'html5blank'); ?>
<a href="//wordpress.org" title="WordPress">WordPress</a> & <a href="//html5blank.com" title="HTML5 Blank">HTML5 Blank</a>.
</p>
<!-- /copyright -->
</footer>
<!-- /footer -->
</div>
<!-- /wrapper -->
<?php wp_footer(); ?>
<!-- analytics -->
<script>
(function(f,i,r,e,s,h,l){i['GoogleAnalyticsObject']=s;f[s]=f[s]||function(){
(f[s].q=f[s].q||[]).push(arguments)},f[s].l=1*new Date();h=i.createElement(r),
l=i.getElementsByTagName(r)[0];h.async=1;h.src=e;l.parentNode.insertBefore(h,l)
})(window,document,'script','//www.google-analytics.com/analytics.js','ga');
ga('create', 'UA-XXXXXXXX-XX', 'yourdomain.com');
ga('send', 'pageview');
</script>
</body>
</html>
|
<!-- footer -->
<footer class="footer" role="contentinfo">
<!-- copyright -->
<p class="copyright">
© <?php echo date("Y"); ?> Copyright <?php bloginfo('name'); ?>. <?php _e('Powered by', 'html5blank'); ?>
<a href="//wordpress.org" title="WordPress">WordPress</a> & <a href="//html5blank.com" title="HTML5 Blank">HTML5 Blank</a>.
</p>
<!-- /copyright -->
</footer>
<!-- /footer -->
</div>
<!-- /wrapper -->
<?php wp_footer(); ?>
<!-- analytics -->
<script>
var _gaq=[['_setAccount','UA-XXXXXXXX-XX'],['_trackPageview']];
(function(d,t){var g=d.createElement(t),s=d.getElementsByTagName(t)[0];
g.src=('https:'==location.protocol?'//ssl':'//www')+'.google-analytics.com/ga.js';
s.parentNode.insertBefore(g,s)})(document,'script');
</script>
</body>
</html>
|
Work around rollup bug(?) which dropped this default-exported async function
|
/* global API */
import { unescape } from 'plug-message-split'
import fetch from './fetch'
const jsonHeaders = {
Accept: 'application/json',
'Content-Type': 'application/json'
}
function getBlurb () {
return fetch('/_/users/me', { credentials: 'same-origin' })
.then(res => unescape(res.data[0].blurb))
}
function setBlurb (blurb) {
return fetch('/_/profile/blurb', {
credentials: 'same-origin',
method: 'put',
headers: jsonHeaders,
body: JSON.stringify({ blurb: blurb })
})
}
async function authenticate ({
user = API.getUser().id,
transport
} = {}) {
let oldBlurb
let result
try {
const data = await transport.getToken({ user })
oldBlurb = await getBlurb()
await setBlurb(`_auth_blurb=${data.blurb}`)
result = await transport.verify({ user })
} catch (e) {
throw e
} finally {
if (oldBlurb) {
await setBlurb(oldBlurb)
}
}
if (result && result.token) {
return result
}
}
export default authenticate
|
/* global API */
import { unescape } from 'plug-message-split'
import fetch from './fetch'
const jsonHeaders = {
Accept: 'application/json',
'Content-Type': 'application/json'
}
function getBlurb () {
return fetch('/_/users/me', { credentials: 'same-origin' })
.then(res => unescape(res.data[0].blurb))
}
function setBlurb (blurb) {
return fetch('/_/profile/blurb', {
credentials: 'same-origin',
method: 'put',
headers: jsonHeaders,
body: JSON.stringify({ blurb: blurb })
})
}
export default async function authenticate ({
user = API.getUser().id,
transport
} = {}) {
let oldBlurb
let result
try {
const data = await transport.getToken({ user })
oldBlurb = await getBlurb()
await setBlurb(`_auth_blurb=${data.blurb}`)
result = await transport.verify({ user })
} catch (e) {
throw e
} finally {
if (oldBlurb) {
await setBlurb(oldBlurb)
}
}
if (result && result.token) {
return result
}
}
|
Logging: Deal with the fact that message is now a dict, not an object.
|
from django.conf import settings
from django.core.mail import get_connection, EmailMessage
from celery.task import task
CONFIG = getattr(settings, 'CELERY_EMAIL_TASK_CONFIG', {})
BACKEND = getattr(settings, 'CELERY_EMAIL_BACKEND',
'django.core.mail.backends.smtp.EmailBackend')
TASK_CONFIG = {
'name': 'djcelery_email_send',
'ignore_result': True,
}
TASK_CONFIG.update(CONFIG)
def from_dict(messagedict):
return EmailMessage(**messagedict)
@task(**TASK_CONFIG)
def send_email(message, **kwargs):
logger = send_email.get_logger()
conn = get_connection(backend=BACKEND,
**kwargs.pop('_backend_init_kwargs', {}))
try:
result = conn.send_messages([from_dict(message)])
logger.debug("Successfully sent email message to %r.", message['to'])
return result
except Exception as e:
# catching all exceptions b/c it could be any number of things
# depending on the backend
logger.warning("Failed to send email message to %r, retrying.",
message.to)
send_email.retry(exc=e)
# backwards compat
SendEmailTask = send_email
|
from django.conf import settings
from django.core.mail import get_connection, EmailMessage
from celery.task import task
CONFIG = getattr(settings, 'CELERY_EMAIL_TASK_CONFIG', {})
BACKEND = getattr(settings, 'CELERY_EMAIL_BACKEND',
'django.core.mail.backends.smtp.EmailBackend')
TASK_CONFIG = {
'name': 'djcelery_email_send',
'ignore_result': True,
}
TASK_CONFIG.update(CONFIG)
def from_dict(messagedict):
return EmailMessage(**messagedict)
@task(**TASK_CONFIG)
def send_email(message, **kwargs):
logger = send_email.get_logger()
conn = get_connection(backend=BACKEND,
**kwargs.pop('_backend_init_kwargs', {}))
try:
result = conn.send_messages([from_dict(message)])
logger.debug("Successfully sent email message to %r.", message.to)
return result
except Exception as e:
# catching all exceptions b/c it could be any number of things
# depending on the backend
logger.warning("Failed to send email message to %r, retrying.",
message.to)
send_email.retry(exc=e)
# backwards compat
SendEmailTask = send_email
|
Create a catch-all route and route to the homepage.
Signed-off-by: Robert Dempsey <715b5a941e732be1613fdd9d94dfd8e50c02b187@gmail.com>
|
# -*- coding: utf-8 -*-
from flask import Flask, render_template
from flask.ext.mongoengine import MongoEngine, MongoEngineSessionInterface
import configparser
app = Flask(__name__)
# Security
WTF_CSRF_ENABLED = True
app.config['SECRET_KEY'] = '2bN9UUaBpcjrxR'
# App Config
config = configparser.ConfigParser()
config.read('config/config.ini')
app.config['MONGODB_DB'] = config['MongoDB']['db_name']
app.config['MONGODB_HOST'] = config['MongoDB']['host']
app.config['MONGODB_PORT'] = int(config['MongoDB']['port'])
app.config['MONGODB_USERNAME'] = config['MongoDB']['username']
app.config['MONGODB_PASSWORD'] = config['MongoDB']['password']
db = MongoEngine(app)
def register_blueprints(app):
# Prevents circular imports
from weighttracker.views.measurement_views import measurements
app.register_blueprint(measurements)
from weighttracker.views.inspiration_views import inspirations
app.register_blueprint(inspirations)
register_blueprints(app)
@app.route('/', defaults={'path': ''})
@app.route('/<path:path>')
def catch_all(path):
return render_template('index.html')
if __name__ == '__main__':
app.run()
|
# -*- coding: utf-8 -*-
from flask import Flask
from flask.ext.mongoengine import MongoEngine, MongoEngineSessionInterface
import configparser
app = Flask(__name__)
# Security
WTF_CSRF_ENABLED = True
app.config['SECRET_KEY'] = '2bN9UUaBpcjrxR'
# App Config
config = configparser.ConfigParser()
config.read('config/config.ini')
app.config['MONGODB_DB'] = config['MongoDB']['db_name']
app.config['MONGODB_HOST'] = config['MongoDB']['host']
app.config['MONGODB_PORT'] = int(config['MongoDB']['port'])
app.config['MONGODB_USERNAME'] = config['MongoDB']['username']
app.config['MONGODB_PASSWORD'] = config['MongoDB']['password']
db = MongoEngine(app)
def register_blueprints(app):
# Prevents circular imports
from weighttracker.views.measurement_views import measurements
app.register_blueprint(measurements)
from weighttracker.views.inspiration_views import inspirations
app.register_blueprint(inspirations)
register_blueprints(app)
if __name__ == '__main__':
app.run()
|
Set stroke properly for hexabins
|
var _ = require('underscore');
var SimpleStyleDefaults = require('./simple-style-defaults');
var defaultFormValues = require('../../../data/default-form-styles.json');
var rampList = require('cartocolor');
module.exports = _.defaults({
_getAggrAttrs: function (geometryType) {
return {
aggregation: {
size: 10,
value: {
operator: 'count',
attribute: ''
}
}
};
},
_getStrokeAttrs: function (geometryType) {
return {
stroke: defaultFormValues.stroke
};
},
_getFillAttrs: function (geometryType) {
return {
fill: {
'color': {
attribute: 'agg_value',
bins: '5',
quantification: 'quantiles',
// TODO: flip the ramp when basemap is black
// range: rampList.ag_GrnYl[5].reverse()
range: _.clone(rampList.ag_GrnYl[5])
}
}
};
}
}, SimpleStyleDefaults);
|
var _ = require('underscore');
var SimpleStyleDefaults = require('./simple-style-defaults');
var defaultFormValues = require('../../../data/default-form-styles.json');
var rampList = require('cartocolor');
module.exports = _.defaults({
_getAggrAttrs: function (geometryType) {
return {
aggregation: {
size: 10,
value: {
operator: 'count',
attribute: ''
}
}
};
},
_getStrokeAttrs: function (geometryType) {
return defaultFormValues.stroke;
},
_getFillAttrs: function (geometryType) {
return {
fill: {
'color': {
attribute: 'agg_value',
bins: '5',
quantification: 'quantiles',
// TODO: flip the ramp when basemap is black
// range: rampList.ag_GrnYl[5].reverse()
range: _.clone(rampList.ag_GrnYl[5])
}
}
};
}
}, SimpleStyleDefaults);
|
Use lowercase for symbol paths
|
#!/usr/bin/env python
import os
import glob
from lib.util import execute, rm_rf, safe_mkdir, s3put, s3_config
SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
SYMBOLS_DIR = 'dist\\symbols'
PDB_LIST = [
'out\\Release\\atom.exe.pdb',
'vendor\\brightray\\vendor\\download\\libchromiumcontent\\Release\\chromiumcontent.dll.pdb',
]
def main():
os.chdir(SOURCE_ROOT)
rm_rf(SYMBOLS_DIR)
safe_mkdir(SYMBOLS_DIR)
for pdb in PDB_LIST:
run_symstore(pdb, SYMBOLS_DIR, 'AtomShell')
bucket, access_key, secret_key = s3_config()
files = glob.glob(SYMBOLS_DIR + '/*.pdb/*/*.pdb')
files = [f.lower() for f in files]
upload_symbols(bucket, access_key, secret_key, files)
def run_symstore(pdb, dest, product):
execute(['symstore', 'add', '/r', '/f', pdb, '/s', dest, '/t', product])
def upload_symbols(bucket, access_key, secret_key, files):
s3put(bucket, access_key, secret_key, SYMBOLS_DIR, 'atom-shell/symbols', files)
if __name__ == '__main__':
import sys
sys.exit(main())
|
#!/usr/bin/env python
import os
import glob
from lib.util import execute, rm_rf, safe_mkdir, s3put, s3_config
SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
SYMBOLS_DIR = 'dist\\symbols'
PDB_LIST = [
'out\\Release\\atom.exe.pdb',
'vendor\\brightray\\vendor\\download\\libchromiumcontent\\Release\\chromiumcontent.dll.pdb',
]
def main():
os.chdir(SOURCE_ROOT)
rm_rf(SYMBOLS_DIR)
safe_mkdir(SYMBOLS_DIR)
for pdb in PDB_LIST:
run_symstore(pdb, SYMBOLS_DIR, 'AtomShell')
bucket, access_key, secret_key = s3_config()
files = glob.glob(SYMBOLS_DIR + '/*.pdb/*/*.pdb')
upload_symbols(bucket, access_key, secret_key, files)
def run_symstore(pdb, dest, product):
execute(['symstore', 'add', '/r', '/f', pdb, '/s', dest, '/t', product])
def upload_symbols(bucket, access_key, secret_key, files):
s3put(bucket, access_key, secret_key, SYMBOLS_DIR, 'atom-shell/symbols', files)
if __name__ == '__main__':
import sys
sys.exit(main())
|
Make the NULL WKT test more specific.
|
<?php
/* vim: set expandtab tabstop=4 shiftwidth=4 softtabstop=4; */
/**
* PHP version 5
*
* @package omeka
* @subpackage nlfeatures
* @author Scholars' Lab <>
* @author Eric Rochester <erochest@virginia.edu>
* @copyright 2011 The Board and Visitors of the University of Virginia
* @license http://www.apache.org/licenses/LICENSE-2.0.html Apache 2 License
*/
?><?php
require_once 'NeatlineFeatures_Test.php';
/**
* This has miscellaneous tests for various bugs.
**/
class NeatlineFeatures_Bug_Test extends NeatlineFeatures_Test
{
/**
* This tests that null WKT data is encoded as an empty string.
*
* @return void
* @author Eric Rochester <erochest@virginia.edu>
**/
public function testNullWkt()
{
$this->dispatch('/items/add');
$this->assertNotQueryContentContains('#element-38//script', 'wkt: null');
}
}
|
<?php
/* vim: set expandtab tabstop=4 shiftwidth=4 softtabstop=4; */
/**
* PHP version 5
*
* @package omeka
* @subpackage nlfeatures
* @author Scholars' Lab <>
* @author Eric Rochester <erochest@virginia.edu>
* @copyright 2011 The Board and Visitors of the University of Virginia
* @license http://www.apache.org/licenses/LICENSE-2.0.html Apache 2 License
*/
?><?php
require_once 'NeatlineFeatures_Test.php';
/**
* This has miscellaneous tests for various bugs.
**/
class NeatlineFeatures_Bug_Test extends NeatlineFeatures_Test
{
/**
* This tests that null WKT data is encoded as an empty string.
*
* @return void
* @author Eric Rochester <erochest@virginia.edu>
**/
public function testNullWkt()
{
$this->dispatch('/items/add');
$this->assertNotQueryContentContains('#element-38//script', 'null');
}
}
|
Replace e removed during build by _error
|
module.exports = function(grunt) {
return grunt.registerMultiTask('mkdir', 'Make directories.', function() {
var options;
options = this.options({
mode: null,
create: []
});
grunt.verbose.writeflags(options, 'Options');
return options.create.forEach(function(filepath) {
grunt.log.write('Creating "' + filepath + '"...');
try {
filepath = grunt.template.process(filepath);
grunt.file.mkdir(filepath, options.mode);
return grunt.log.ok();
} catch (e) {
grunt.log.error();
grunt.verbose.error(e);
return grunt.fail.warn('Mkdir operation failed.');
}
});
});
};
|
module.exports = function(grunt) {
return grunt.registerMultiTask('mkdir', 'Make directories.', function() {
var options;
options = this.options({
mode: null,
create: []
});
grunt.verbose.writeflags(options, 'Options');
return options.create.forEach(function(filepath) {
var e;
grunt.log.write('Creating "' + filepath + '"...');
try {
filepath = grunt.template.process(filepath);
grunt.file.mkdir(filepath, options.mode);
return grunt.log.ok();
} catch (_error) {
e = _error;
grunt.log.error();
grunt.verbose.error(e);
return grunt.fail.warn('Mkdir operation failed.');
}
});
});
};
|
Fix problems with element identification
|
(function($) {
var params = (function() {
var url_tokens = ('' + document.location.search).replace(/\?/, '').split(/&/g),
params = {};
for(piece in url_tokens) {
var slice = url_tokens[piece].split(/=/);
params[slice[0]] = decodeURIComponent(slice[1]);
}
return params;
})();
$(function() {
$("#contentPane div:first div:eq(1)").trigger('click');
var reloader = function() {
var div = $("#contentPane div[contenteditable]");
if ( !div.length ) {
setTimeout(reloader, 200);
} else {
var text = params['title'].replace(/\+/g, ' ') + ': ' + params['href'];
div.text(text);
}
};
setTimeout(reloader, 200);
});
})(jQuery);
|
(function($) {
var params = (function() {
var url_tokens = document.URL.replace(document.baseURI.concat('?'), '').split(/&/g),
params = {};
for(piece in url_tokens) {
var slice = url_tokens[piece].split(/=/);
params[slice[0]] = decodeURIComponent(slice[1]);
}
return params;
})();
$(function() {
$(".n-Nd").trigger('click');
var reloader = function() {
var div = $("div.n-Ob div[contenteditable]");
if ( !div.length ) {
setTimeout(reloader, 200);
} else {
var text = params['title'].replace(/\+/g, ' ') + ': ' + params['href'];
div.text(text);
}
};
setTimeout(reloader, 200);
});
})(jQuery);
|
Fix unit test failing on case sensitivity
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import types
from os.path import abspath, dirname, join
from epubcheck import utils, samples
TEST_DIR = abspath(dirname(samples.__file__))
def test_utils_java_version():
assert utils.java_version().startswith('java version')
def test_epubcheck_help():
assert 'listChecks' in utils.epubcheck_help()
def test_epubcheck_version():
assert utils.epubcheck_version().startswith('EPUBCheck v4.2.1')
def test_iter_files_simple():
gen = utils.iter_files(TEST_DIR, ['py'])
assert isinstance(gen, types.GeneratorType)
assert len(list(gen)) == 1
def test_iter_files_no_matches():
gen = utils.iter_files(TEST_DIR, ['noext'])
assert len(list(gen)) == 0
def test_iter_files_flat():
gen = utils.iter_files(TEST_DIR, ['epub'])
assert len(list(gen)) == 2
gen = utils.iter_files(TEST_DIR, ['EPUB'])
assert len(list(gen)) == 2
def test_iter_files_recursive():
gen = utils.iter_files(join('../', TEST_DIR), ['epub'], recursive=True)
assert len(list(gen)) == 2
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import types
from os.path import abspath, dirname, join
from epubcheck import utils, samples
TEST_DIR = abspath(dirname(samples.__file__))
def test_utils_java_version():
assert utils.java_version().startswith('java version')
def test_epubcheck_help():
assert 'listChecks' in utils.epubcheck_help()
def test_epubcheck_version():
assert utils.epubcheck_version().startswith('EpubCheck v4.0.1')
def test_iter_files_simple():
gen = utils.iter_files(TEST_DIR, ['py'])
assert isinstance(gen, types.GeneratorType)
assert len(list(gen)) == 1
def test_iter_files_no_matches():
gen = utils.iter_files(TEST_DIR, ['noext'])
assert len(list(gen)) == 0
def test_iter_files_flat():
gen = utils.iter_files(TEST_DIR, ['epub'])
assert len(list(gen)) == 2
gen = utils.iter_files(TEST_DIR, ['EPUB'])
assert len(list(gen)) == 2
def test_iter_files_recursive():
gen = utils.iter_files(join('../', TEST_DIR), ['epub'], recursive=True)
assert len(list(gen)) == 2
|
Revert "Update manager for Person requirement"
This reverts commit 1f7c21280b7135f026f1ff807ffc50c97587f6fd.
|
# Django
from django.contrib.auth.models import BaseUserManager
class UserManager(BaseUserManager):
def create_user(self, email, password='', **kwargs):
user = self.model(
email=email,
password='',
is_active=True,
**kwargs
)
user.save(using=self._db)
return user
def create_superuser(self, email, password, **kwargs):
user = self.model(
email=email,
is_staff=True,
is_active=True,
**kwargs
)
user.set_password(password)
user.save(using=self._db)
return user
|
# Django
from django.contrib.auth.models import BaseUserManager
class UserManager(BaseUserManager):
def create_user(self, email, password='', person, **kwargs):
user = self.model(
email=email,
password='',
person=person,
is_active=True,
**kwargs
)
user.save(using=self._db)
return user
def create_superuser(self, email, password, person, **kwargs):
user = self.model(
email=email,
person=person,
is_staff=True,
is_active=True,
**kwargs
)
user.set_password(password)
user.save(using=self._db)
return user
|
Revert "Add require for global installation"
This reverts commit 586d35539e88ce7ade09c8b38ae501e3b81c74d7.
|
<?php
if ( !defined( 'WP_CLI' ) ) return;
global $argv;
$env = $argv[1];
$config = array();
$config_path = getenv( 'HOME' ) . '/.wp-cli/config.yml';
if ( is_readable( $config_path ) ){
$configurator = \WP_CLI::get_configurator();
$configurator->merge_yml( $config_path );
list( $config, $extra_config ) = $configurator->to_array();
}
if ( isset($config['color']) && $config['color'] === 'auto' ) {
$colorize = !\cli\Shell::isPiped();
} else {
$colorize = true;
}
if ( isset($config['quiet']) && $config['quiet'] )
$logger = new \WP_CLI\Loggers\Quiet;
else
$logger = new \WP_CLI\Loggers\Regular( $colorize );
\WP_CLI::set_logger( $logger );
try {
$environment = new \ViewOne\Environment();
$environment->run($env);
} catch (Exception $e) {
\WP_CLI::error( $e->getMessage() );
}
|
<?php
if ( !defined( 'WP_CLI' ) ) return;
require_once 'src/ViewOne/Environment.php';
global $argv;
$env = $argv[1];
$config = array();
$config_path = getenv( 'HOME' ) . '/.wp-cli/config.yml';
if ( is_readable( $config_path ) ){
$configurator = \WP_CLI::get_configurator();
$configurator->merge_yml( $config_path );
list( $config, $extra_config ) = $configurator->to_array();
}
if ( isset($config['color']) && $config['color'] === 'auto' ) {
$colorize = !\cli\Shell::isPiped();
} else {
$colorize = true;
}
if ( isset($config['quiet']) && $config['quiet'] )
$logger = new \WP_CLI\Loggers\Quiet;
else
$logger = new \WP_CLI\Loggers\Regular( $colorize );
\WP_CLI::set_logger( $logger );
try {
$environment = new \ViewOne\Environment();
$environment->run($env);
} catch (Exception $e) {
\WP_CLI::error( $e->getMessage() );
}
|
Add Odoo Community Association (OCA) in authors
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# This module copyright (C) 2015 Therp BV <http://therp.nl>.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name": "Piwik analytics",
"version": "1.0",
"author": "Therp BV,Odoo Community Association (OCA)",
"license": "AGPL-3",
"category": "Website",
"summary": "Track website users using piwik",
"depends": [
'website',
],
"data": [
"views/website_config_settings.xml",
"views/website.xml",
'views/templates.xml',
],
"auto_install": False,
"installable": True,
"application": False,
}
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# This module copyright (C) 2015 Therp BV <http://therp.nl>.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name": "Piwik analytics",
"version": "1.0",
"author": "Therp BV",
"license": "AGPL-3",
"category": "Website",
"summary": "Track website users using piwik",
"depends": [
'website',
],
"data": [
"views/website_config_settings.xml",
"views/website.xml",
'views/templates.xml',
],
"auto_install": False,
"installable": True,
"application": False,
}
|
addrs: Fix infinite recursion in AbsInputVariableInstance.String
|
package addrs
import (
"fmt"
)
// InputVariable is the address of an input variable.
type InputVariable struct {
referenceable
Name string
}
func (v InputVariable) String() string {
return "var." + v.Name
}
// AbsInputVariableInstance is the address of an input variable within a
// particular module instance.
type AbsInputVariableInstance struct {
Module ModuleInstance
Variable InputVariable
}
// InputVariable returns the absolute address of the input variable of the
// given name inside the receiving module instance.
func (m ModuleInstance) InputVariable(name string) AbsInputVariableInstance {
return AbsInputVariableInstance{
Module: m,
Variable: InputVariable{
Name: name,
},
}
}
func (v AbsInputVariableInstance) String() string {
if len(v.Module) == 0 {
return v.Variable.String()
}
return fmt.Sprintf("%s.%s", v.Module.String(), v.Variable.String())
}
|
package addrs
import (
"fmt"
)
// InputVariable is the address of an input variable.
type InputVariable struct {
referenceable
Name string
}
func (v InputVariable) String() string {
return "var." + v.Name
}
// AbsInputVariableInstance is the address of an input variable within a
// particular module instance.
type AbsInputVariableInstance struct {
Module ModuleInstance
Variable InputVariable
}
// InputVariable returns the absolute address of the input variable of the
// given name inside the receiving module instance.
func (m ModuleInstance) InputVariable(name string) AbsInputVariableInstance {
return AbsInputVariableInstance{
Module: m,
Variable: InputVariable{
Name: name,
},
}
}
func (v AbsInputVariableInstance) String() string {
if len(v.Module) == 0 {
return v.String()
}
return fmt.Sprintf("%s.%s", v.Module.String(), v.Variable.String())
}
|
Deploy to GitHub pages [ci skip]
|
export Tuttolino404 from "./tuttolino-404.png"
export TuttolinoCompetitor from "./tuttolino-competitor.svg"
export TuttolinoErrorMobile from "./tuttolino-error-mobile.png"
export TuttolinoError from "./tuttolino-error.svg"
export TuttolinoFamilySofa from "./tuttolino-family-sofa.svg"
export TuttolinoFamily from "./tuttolino-family.svg"
export TuttolinoGay from "./tuttolino-gay.svg"
export TuttolinoGlasses from "./tuttolino-glasses.svg"
export TuttolinoHey from "./tuttolino-hey.svg"
export TuttolinoHolmesNoCircle from "./tuttolino-holmes-no_circle.svg"
export TuttolinoHolmes from "./tuttolino-holmes.svg"
export TuttolinoIntroScreens from "./tuttolino-intro-screens.svg"
export TuttolinoSergi from "./tuttolino-sergi.svg"
export TuttolinoSuccess from "./tuttolino-success.svg"
export TuttolinoTablet from "./tuttolino-tablet.svg"
export TuttolinoTuttiFan from "./tuttolino-tutti_fan.svg"
|
export Tuttolino404 from "./tuttolino-404.png";
export TuttolinoCompetitor from "./tuttolino-competitor.svg";
export TuttolinoErrorMobile from "./tuttolino-error-mobile.png";
export TuttolinoError from "./tuttolino-error.svg";
export TuttolinoFamilySofa from "./tuttolino-family-sofa.svg";
export TuttolinoFamily from "./tuttolino-family.svg";
export TuttolinoGay from "./tuttolino-gay.svg";
export TuttolinoGlasses from "./tuttolino-glasses.svg";
export TuttolinoHey from "./tuttolino-hey.svg";
export TuttolinoHolmesNoCircle from "./tuttolino-holmes-no_circle.svg";
export TuttolinoHolmes from "./tuttolino-holmes.svg";
export TuttolinoIntroScreens from "./tuttolino-intro-screens.svg";
export TuttolinoSergi from "./tuttolino-sergi.svg";
export TuttolinoSuccess from "./tuttolino-success.svg";
export TuttolinoTablet from "./tuttolino-tablet.svg";
export TuttolinoTuttiFan from "./tuttolino-tutti_fan.svg";
|
Update serialization tests for tokenizer
|
# coding: utf-8
from __future__ import unicode_literals
from ...util import get_lang_class
from ..util import make_tempdir, assert_packed_msg_equal
import pytest
def load_tokenizer(b):
tok = get_lang_class('en').Defaults.create_tokenizer()
tok.from_bytes(b)
return tok
@pytest.mark.parametrize('text', ["I💜you", "they’re", "“hello”"])
def test_serialize_tokenizer_roundtrip_bytes(en_tokenizer, text):
tokenizer = en_tokenizer
new_tokenizer = load_tokenizer(tokenizer.to_bytes())
assert_packed_msg_equal(new_tokenizer.to_bytes(), tokenizer.to_bytes())
# assert new_tokenizer.to_bytes() == tokenizer.to_bytes()
doc1 = tokenizer(text)
doc2 = new_tokenizer(text)
assert [token.text for token in doc1] == [token.text for token in doc2]
def test_serialize_tokenizer_roundtrip_disk(en_tokenizer):
tokenizer = en_tokenizer
with make_tempdir() as d:
file_path = d / 'tokenizer'
tokenizer.to_disk(file_path)
tokenizer_d = en_tokenizer.from_disk(file_path)
assert tokenizer.to_bytes() == tokenizer_d.to_bytes()
|
# coding: utf-8
from __future__ import unicode_literals
from ..util import make_tempdir
import pytest
@pytest.mark.parametrize('text', ["I can't do this"])
def test_serialize_tokenizer_roundtrip_bytes(en_tokenizer, text):
tokenizer_b = en_tokenizer.to_bytes()
new_tokenizer = en_tokenizer.from_bytes(tokenizer_b)
assert new_tokenizer.to_bytes() == tokenizer_b
doc1 = en_tokenizer(text)
doc2 = new_tokenizer(text)
assert [token.text for token in doc1] == [token.text for token in doc2]
def test_serialize_tokenizer_roundtrip_disk(en_tokenizer):
tokenizer = en_tokenizer
with make_tempdir() as d:
file_path = d / 'tokenizer'
tokenizer.to_disk(file_path)
tokenizer_d = en_tokenizer.from_disk(file_path)
assert tokenizer.to_bytes() == tokenizer_d.to_bytes()
|
Revert "Ignore broken tests until fixed"
This reverts commit 1b5c04a0aec37fb3ecb4910764982eb0f168887f.
|
/*
* Copyright 2013-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.launcher.cli;
import static org.hamcrest.CoreMatchers.containsString;
import static org.junit.Assert.assertThat;
import org.junit.Rule;
import org.junit.Test;
/**
* @author Spencer Gibb
*/
public class LauncherCommandTests {
@Rule
public OutputCapture output = new OutputCapture();
@Test
public void testCreateClassLoaderAndListDeployables() throws Exception {
new LauncherCommand().run("--list");
assertThat(output.toString(), containsString("configserver"));
}
@Test
public void testNonOptionArgsPassedDown() throws Exception {
new LauncherCommand().run("--list", "--", "--spring.profiles.active=test");
assertThat(output.toString(), containsString("foo"));
}
}
|
/*
* Copyright 2013-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.launcher.cli;
import static org.hamcrest.CoreMatchers.containsString;
import static org.junit.Assert.assertThat;
import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
/**
* @author Spencer Gibb
*/
public class LauncherCommandTests {
@Rule
public OutputCapture output = new OutputCapture();
@Test
@Ignore
public void testCreateClassLoaderAndListDeployables() throws Exception {
new LauncherCommand().run("--list");
assertThat(output.toString(), containsString("configserver"));
}
@Test
@Ignore
public void testNonOptionArgsPassedDown() throws Exception {
new LauncherCommand().run("--list", "--", "--spring.profiles.active=test");
assertThat(output.toString(), containsString("foo"));
}
}
|
Use WHIP_SETTINGS environment var for Flask app
|
#!/usr/bin/env python
from flask import Flask, abort, make_response
from socket import inet_aton, error as socket_error
from .db import Database
app = Flask(__name__)
app.config.from_envvar('WHIP_SETTINGS')
db = Database(app.config['DATABASE_DIR'])
@app.route('/ip/<ip>')
def lookup(ip):
try:
k = inet_aton(ip)
except socket_error:
abort(400)
info_as_json = db.lookup(k)
if info_as_json is None:
abort(404)
response = make_response(info_as_json)
response.headers['Content-type'] = 'application/json'
return response
if __name__ == '__main__':
import argparse
import sys
parser = argparse.ArgumentParser()
parser.add_argument('--host', default='localhost')
parser.add_argument('--port', default=5555, type=int)
parser.add_argument('--debug', default=False, action='store_true')
args = parser.parse_args()
try:
app.run(**vars(args))
except KeyboardInterrupt:
sys.stderr.write("Aborting...\n")
|
#!/usr/bin/env python
from flask import Flask, abort, make_response
from socket import inet_aton, error as socket_error
from .db import Database
app = Flask(__name__)
db = Database()
@app.route('/ip/<ip>')
def lookup(ip):
try:
k = inet_aton(ip)
except socket_error:
abort(400)
info_as_json = db.lookup(k)
if info_as_json is None:
abort(404)
response = make_response(info_as_json)
response.headers['Content-type'] = 'application/json'
return response
if __name__ == '__main__':
import argparse
import sys
parser = argparse.ArgumentParser()
parser.add_argument('--host', default='localhost')
parser.add_argument('--port', default=5555, type=int)
parser.add_argument('--debug', default=False, action='store_true')
args = parser.parse_args()
try:
app.run(**vars(args))
except KeyboardInterrupt:
sys.stderr.write("Aborting...\n")
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.