text
stringlengths 16
4.96k
| positive
stringlengths 321
2.24k
| negative
stringlengths 310
2.21k
|
|---|---|---|
Fix glmnet test and add multinomial gradient test
|
import numpy as np
import scipy.sparse as sps
from sklearn.preprocessing import StandardScaler
from numpy.testing import assert_allclose
from pyglmnet import GLM
def test_glmnet():
"""Test glmnet."""
glm = GLM(distr='poisson')
scaler = StandardScaler()
n_samples, n_features = 10000, 100
density = 0.1
# coefficients
beta0 = np.random.rand()
beta = sps.rand(n_features, 1, density=density).toarray()
X_train = np.random.normal(0.0, 1.0, [n_samples, n_features])
y_train = glm.simulate(beta0, beta, X_train)
X_train = scaler.fit_transform(X_train)
glm.fit(X_train, y_train)
beta_ = glm.fit_[-2]['beta'][:]
assert_allclose(beta[:], beta_, atol=0.1) # check fit
density_ = np.sum(beta_ > 0.1) / float(n_features)
assert_allclose(density_, density, atol=0.05) # check density
def test_multinomial_gradient():
"""Gradient of intercept params is different"""
glm = GLM(distr='multinomial')
X = np.array([[1,2,3], [4,5,6]])
y = np.array([1,2])
beta = np.zeros([4, 2])
grad_beta0, grad_beta = glm.grad_L2loss(beta[0], beta[1:], 0, X, y)
assert grad_beta0[0] != grad_beta0[1]
|
import numpy as np
import scipy.sparse as sps
from sklearn.preprocessing import StandardScaler
from numpy.testing import assert_allclose
from pyglmnet import GLM
def test_glmnet():
"""Test glmnet."""
glm = GLM(distr='poisson')
scaler = StandardScaler()
n_samples, n_features = 10000, 100
density = 0.1
# coefficients
beta0 = np.random.rand()
beta = sps.rand(n_features, 1, density=density).toarray()
X_train = np.random.normal(0.0, 1.0, [n_samples, n_features])
y_train = glm.simulate(beta0, beta, X_train)
X_train = scaler.fit_transform(X_train)
glm.fit(X_train, y_train)
beta_ = glm.fit_params[-2]['beta'][:]
assert_allclose(beta[:], beta_, atol=0.1) # check fit
density_ = np.sum(beta_ > 0.1) / float(n_features)
assert_allclose(density_, density, atol=0.05) # check density
|
Replace usage of `formatMessage(findTranslationByKey` with `t`
|
import Ember from 'ember';
import ValidatorsMessages from 'ember-cp-validations/validators/messages';
const { Logger:logger } = Ember;
export default function() {
ValidatorsMessages.reopen({
intl: Ember.inject.service(),
prefix: 'errors',
getDescriptionFor(attribute, context = {}) {
let key = `${this.get('prefix')}.description`;
let intl = this.get('intl');
if (intl && intl.exists(key)) {
return intl.t(key, context);
}
return this._super(...arguments);
},
getMessageFor(type, context = {}) {
let key = `${this.get('prefix')}.${type}`;
let intl = this.get('intl');
if (intl && intl.exists(key)) {
return this.formatMessage(intl.t(key, context));
}
logger.warn(`[ember-intl-cp-validations] Missing translation for validation key: ${key}\nhttp://offirgolan.github.io/ember-cp-validations/docs/messages/index.html`);
return this._super(...arguments);
}
});
}
|
import Ember from 'ember';
import ValidatorsMessages from 'ember-cp-validations/validators/messages';
const { Logger:logger } = Ember;
export default function() {
ValidatorsMessages.reopen({
intl: Ember.inject.service(),
prefix: 'errors',
getDescriptionFor(attribute, context = {}) {
let key = `${this.get('prefix')}.description`;
let intl = this.get('intl');
if (intl && intl.exists(key)) {
return intl.formatMessage(intl.findTranslationByKey(key), context);
}
return this._super(...arguments);
},
getMessageFor(type, context = {}) {
let key = `${this.get('prefix')}.${type}`;
let intl = this.get('intl');
if (intl && intl.exists(key)) {
return this.formatMessage(intl.formatMessage(intl.findTranslationByKey(key), context));
}
logger.warn(`[ember-intl-cp-validations] Missing translation for validation key: ${key}\nhttp://offirgolan.github.io/ember-cp-validations/docs/messages/index.html`);
return this._super(...arguments);
}
});
}
|
Make more similar to original version
|
var Immutable = require('immutable');
var is = Immutable.is.bind(Immutable);
function shallowEqualImmutable(objA, objB) {
if (objA === objB || is(objA, objB)) {
return true;
}
if (typeof objA !== 'object' || objA === null ||
typeof objB !== 'object' || objB === null) {
return false;
}
var keysA = Object.keys(objA);
var keysB = Object.keys(objB);
if (keysA.length !== keysB.length) {
return false;
}
// Test for A's keys different from B.
var bHasOwnProperty = Object.prototype.hasOwnProperty.bind(objB);
for (var i = 0; i < keysA.length; i++) {
if (!bHasOwnProperty(keysA[i]) || !is(objA[keysA[i]], objB[keysA[i]])) {
return false;
}
}
return true;
}
module.exports = shallowEqualImmutable;
|
var Immutable = require('immutable');
var is = Immutable.is.bind(Immutable),
getKeys = Object.keys.bind(Object);
function shallowEqualImmutable(objA, objB) {
if (is(objA, objB)) {
return true;
}
var keysA = getKeys(objA),
keysB = getKeys(objB),
keysAlength = keysA.length,
keysBlength = keysB.length
if(keysAlength !== keysBlength) {
return false;
}
// Test for A's keys different from B.
for(var i = 0; i < keysAlength; i++) {
if (!objB.hasOwnProperty(keysA[i]) || !is(objA[keysA[i]], objB[keysA[i]]) ) {
return false;
}
}
// Now we dont need to test for B's keys missing from A,
// because if length's is same and prev check success - objB hasn't more keys
return true;
}
module.exports = shallowEqualImmutable;
|
Fix VarDumper Server for Dumps where no request exist
|
<?php
namespace FroshProfiler\Components\VarDumper;
use Enlight_Controller_Front;
use Symfony\Component\VarDumper\Dumper\ContextProvider\ContextProviderInterface;
/**
* Class RequestContextProvider
* @author Soner Sayakci <shyim@posteo.de>
*/
class RequestContextProvider implements ContextProviderInterface
{
/**
* @var Enlight_Controller_Front
*/
private $front;
/**
* RequestContextProvider constructor.
* @param Enlight_Controller_Front $front
* @author Soner Sayakci <shyim@posteo.de>
*/
public function __construct(Enlight_Controller_Front $front)
{
$this->front = $front;
}
/**
* @return array|null Context data or null if unable to provide any context
*/
public function getContext()
{
$request = $this->front->Request();
if ($request === null) {
return null;
}
return array(
'uri' => $request->getScheme() . '//' . $request->getHttpHost() . $request->getRequestUri(),
'method' => $request->getMethod(),
'controller' => $request->getControllerName(),
'identifier' => spl_object_hash($request),
);
}
}
|
<?php
namespace FroshProfiler\Components\VarDumper;
use Enlight_Controller_Front;
use Symfony\Component\VarDumper\Dumper\ContextProvider\ContextProviderInterface;
/**
* Class RequestContextProvider
* @author Soner Sayakci <shyim@posteo.de>
*/
class RequestContextProvider implements ContextProviderInterface
{
/**
* @var Enlight_Controller_Front
*/
private $front;
/**
* RequestContextProvider constructor.
* @param Enlight_Controller_Front $front
* @author Soner Sayakci <shyim@posteo.de>
*/
public function __construct(Enlight_Controller_Front $front)
{
$this->front = $front;
}
/**
* @return array|null Context data or null if unable to provide any context
*/
public function getContext()
{
$request = $this->front->Request();
return array(
'uri' => $request->getScheme() . '//' . $request->getHttpHost() . $request->getRequestUri(),
'method' => $request->getMethod(),
'controller' => $request->getControllerName(),
'identifier' => spl_object_hash($request),
);
}
}
|
Allow core jquery-git to be used for unit tests.
|
(function() {
// Insert a script tag pointing at the desired version of jQuery
// Get the version from the url
var jqueryRE = /[\\?&]jquery=([^&#]*)/,
results = jqueryRE.exec( location.search ),
version = "",
jq,
myScriptTag = document.getElementsByTagName( "script" )[document.getElementsByTagName( "script" ).length - 1],
baseUrl = myScriptTag.src.replace( /(.*)\/.*$/, "$1/" ),
url = baseUrl + "jquery-1.7.1.js";
if( results ) {
version = decodeURIComponent(results[results.length - 1].replace(/\+/g, " "));
}
switch( version ) {
case "1.6.4":
url = baseUrl + "jquery-1.6.4.js";
break;
case "git":
url = "http://code.jquery.com/jquery-git.js";
break;
}
document.write( "<script src='" + url + "'></script>" );
if ( parseInt( version.replace( /\./g, "" ), 10 ) < 170 && window.define && window.define.amd ) {
document.write( '<script>define( "jquery", [], function () { return jQuery; } );</script>' );
}
}());
|
(function() {
// Insert a script tag pointing at the desired version of jQuery
// Get the version from the url
var jqueryRE = /[\\?&]jquery=([^&#]*)/,
results = jqueryRE.exec( location.search ),
version = "",
jq,
myScriptTag = document.getElementsByTagName( "script" )[document.getElementsByTagName( "script" ).length - 1],
baseUrl = myScriptTag.src.replace( /(.*)\/.*$/, "$1/" ),
url = baseUrl + "jquery-1.7.1.js";
if( results ) {
version = decodeURIComponent(results[results.length - 1].replace(/\+/g, " "));
}
switch( version ) {
case "1.6.4":
url = baseUrl + "jquery-1.6.4.js";
break;
}
document.write( "<script src='" + url + "'></script>" );
if ( parseInt( version.replace( /\./g, "" ), 10 ) < 170 && window.define && window.define.amd ) {
document.write( '<script>define( "jquery", [], function () { return jQuery; } );</script>' );
}
}());
|
Fix prefix of deprecated property
|
package de.linkvt.bachelor.features.annotations;
import de.linkvt.bachelor.features.Feature;
import de.linkvt.bachelor.features.FeatureCategory;
import org.semanticweb.owlapi.model.OWLAnnotation;
import org.semanticweb.owlapi.model.OWLAxiom;
import org.semanticweb.owlapi.model.OWLObjectProperty;
import org.springframework.stereotype.Component;
@Component
public class OwlDeprecatedPropertyFeature extends Feature {
@Override
public void addToOntology() {
OWLObjectProperty property = featurePool.getExclusiveProperty(":DeprecatedProperty");
addToGenericDomainAndNewRange(property, featurePool.getExclusiveClass(":DeprecatedPropertyRange"));
OWLAnnotation annotation = factory.getOWLAnnotation(factory.getOWLDeprecated(), factory.getOWLLiteral(true));
OWLAxiom deprecatedAxiom = factory.getOWLAnnotationAssertionAxiom(property.getIRI(), annotation);
addAxiomToOntology(deprecatedAxiom);
}
@Override
public String getName() {
return "owl:DeprecatedProperty";
}
@Override
public String getToken() {
return "deprecatedprop";
}
@Override
public FeatureCategory getCategory() {
return FeatureCategory.ANNOTATIONS;
}
}
|
package de.linkvt.bachelor.features.annotations;
import de.linkvt.bachelor.features.Feature;
import de.linkvt.bachelor.features.FeatureCategory;
import org.semanticweb.owlapi.model.OWLAnnotation;
import org.semanticweb.owlapi.model.OWLAxiom;
import org.semanticweb.owlapi.model.OWLObjectProperty;
import org.springframework.stereotype.Component;
@Component
public class OwlDeprecatedPropertyFeature extends Feature {
@Override
public void addToOntology() {
OWLObjectProperty property = featurePool.getExclusiveProperty("::DeprecatedProperty");
addToGenericDomainAndNewRange(property, featurePool.getExclusiveClass(":DeprecatedPropertyRange"));
OWLAnnotation annotation = factory.getOWLAnnotation(factory.getOWLDeprecated(), factory.getOWLLiteral(true));
OWLAxiom deprecatedAxiom = factory.getOWLAnnotationAssertionAxiom(property.getIRI(), annotation);
addAxiomToOntology(deprecatedAxiom);
}
@Override
public String getName() {
return "owl:DeprecatedProperty";
}
@Override
public String getToken() {
return "deprecatedprop";
}
@Override
public FeatureCategory getCategory() {
return FeatureCategory.ANNOTATIONS;
}
}
|
Fix redirect after new pledge form is sent
|
import { connect } from 'react-redux'
import I18n from 'i18n-js'
import { toggleSessionPopup } from '../../UserSession/actions/sessionActions'
import { setEntity } from '../../lib/actions/entityActions'
import PledgeForm from '../components/PledgeForm'
import redirectTo from '../../lib/browser/redirectTo'
const mapStateToProps = function(state, ownProps) {
return {
availableTags: assembleTags(ownProps.tags),
userLoggedIn: state.currentUser !== null,
formId: ['PledgeForm', ownProps.id].join('-'),
}
}
function assembleTags(tags) {
return tags.map(function(tag) {
return {
value: tag.id,
label: I18n.t(`tags.names.${tag.name}`),
}
})
}
const mapDispatchToProps = dispatch => ({
onLinkClick: function(event) {
event.preventDefault()
dispatch(toggleSessionPopup())
window.scrollTo(0, 0)
},
dispatch
})
const mergeProps = (stateProps, dispatchProps, ownProps) => ({
...stateProps,
...dispatchProps,
...ownProps,
afterResponse: json => {
if (json.meta && json.meta.redirect) {
redirectTo(json.meta.redirect)
} else if (json.status === 'success') {
dispatchProps.dispatch(setEntity(stateProps.formId, {}))
}
}
})
export default connect(
mapStateToProps,
mapDispatchToProps,
mergeProps
)(PledgeForm)
|
import { connect } from 'react-redux'
import I18n from 'i18n-js'
import { toggleSessionPopup } from '../../UserSession/actions/sessionActions'
import { setEntity } from '../../lib/actions/entityActions'
import PledgeForm from '../components/PledgeForm'
const mapStateToProps = function(state, ownProps) {
return {
availableTags: assembleTags(ownProps.tags),
userLoggedIn: state.currentUser !== null,
formId: ['PledgeForm', ownProps.id].join('-'),
}
}
function assembleTags(tags) {
return tags.map(function(tag) {
return {
value: tag.id,
label: I18n.t(`tags.names.${tag.name}`),
}
})
}
const mapDispatchToProps = dispatch => ({
onLinkClick: function(event) {
event.preventDefault()
dispatch(toggleSessionPopup())
window.scrollTo(0, 0)
},
dispatch
})
const mergeProps = (stateProps, dispatchProps, ownProps) => ({
...stateProps,
...dispatchProps,
...ownProps,
afterResponse: json => {
console.log(json)
if (json.status === 'success') {
dispatchProps.dispatch(setEntity(stateProps.formId, {}))
}
}
})
export default connect(
mapStateToProps,
mapDispatchToProps,
mergeProps
)(PledgeForm)
|
Replace reserved token if address already exists
|
import { observable, action } from 'mobx';
class ReservedTokenStore {
@observable tokens;
constructor(tokens = []) {
this.tokens = tokens;
}
@action addToken = (token) => {
const currentToken = this.tokens.find(t => t.addr === token.addr)
if (currentToken) {
const index = this.tokens.indexOf(currentToken)
this.tokens[index] = token
} else {
this.tokens.push(token);
}
}
@action setTokenProperty = (index, property, value) => {
let newToken = {...this.tokens[index]};
newToken[property] = value;
this.tokens[index] = newToken;
}
@action removeToken = (index) => {
this.tokens.splice(index,1);
}
findToken(inputToken) {
return this.tokens.find((token) => {
if (inputToken['dim'] === token['dim'] && inputToken['addr'] === token['addr'] && inputToken['val'] === token['val']) {
return true;
}
return false;
});
}
}
const reservedTokenStore = new ReservedTokenStore();
export default reservedTokenStore;
export { ReservedTokenStore };
|
import { observable, action } from 'mobx';
class ReservedTokenStore {
@observable tokens;
constructor(tokens = []) {
this.tokens = tokens;
}
@action addToken = (token) => {
this.tokens.push(token);
}
@action setTokenProperty = (index, property, value) => {
let newToken = {...this.tokens[index]};
newToken[property] = value;
this.tokens[index] = newToken;
}
@action removeToken = (index) => {
this.tokens.splice(index,1);
}
findToken(inputToken) {
return this.tokens.find((token) => {
if (inputToken['dim'] === token['dim'] && inputToken['addr'] === token['addr'] && inputToken['val'] === token['val']) {
return true;
}
return false;
});
}
}
const reservedTokenStore = new ReservedTokenStore();
export default reservedTokenStore;
export { ReservedTokenStore };
|
Make use of expanduser() more sane
|
import logging
import logging.config
LOG_CONFIG = {
'version': 1,
'formatters': {
'standard': {'format': 'voltron: [%(levelname)s] %(message)s'}
},
'handlers': {
'default': {
'class': 'logging.StreamHandler',
'formatter': 'standard'
}
},
'loggers': {
'voltron': {
'handlers': ['default'],
'level': 'INFO',
'propogate': True,
}
}
}
VOLTRON_DIR = os.path.expanduser('~/.voltron/')
VOLTRON_CONFIG = VOLTRON_DIR + 'config'
def configure_logging():
logging.config.dictConfig(LOG_CONFIG)
log = logging.getLogger('voltron')
return log
|
import logging
import logging.config
LOG_CONFIG = {
'version': 1,
'formatters': {
'standard': {'format': 'voltron: [%(levelname)s] %(message)s'}
},
'handlers': {
'default': {
'class': 'logging.StreamHandler',
'formatter': 'standard'
}
},
'loggers': {
'voltron': {
'handlers': ['default'],
'level': 'INFO',
'propogate': True,
}
}
}
VOLTRON_DIR = '~/.voltron/'
VOLTRON_CONFIG = VOLTRON_DIR + 'config'
def configure_logging():
logging.config.dictConfig(LOG_CONFIG)
log = logging.getLogger('voltron')
return log
|
Fix extra space on 'Bienvenue !'
|
// Is device mobile?
if (window.innerWidth >= 850) {
// Give video its source, will initiate load. Otherwise, it won't load anything and will be hidden by CSS.
// This is so the video doesn't burn mobile data on phones.
// Also it's a stylistic nightmare.
document.getElementsByTagName('video')[0].src = 'video.mp4';
// Parallax effect for video background.
window.onscroll = function() {
document.getElementsByClassName('video-container')[0].style.transform = 'translateY(' + document.body.scrollTop / 1.5 + 'px)';
};
}
var greetings = ['¡BIENVENIDOS!', '欢迎!', 'BIENVENUE!', 'WILLKOMMEN!', '!أهلاً و سهلاً', 'TERVETULOA!', 'CREOSO!', 'WELKOM!', 'VELKOMMEN!', 'BONVENON!', 'FI\'I!', 'ようこそ', 'SELAMAT DATANG!', 'BEM-VINDO!', 'PE\'EL!', 'ÜDVÖZLET!', 'ברוך הבא', 'स्वागतम्!', 'WELCOME!'];
var counter = 0;
setInterval(function() {
document.getElementById('welcome').textContent = greetings[counter];
if (counter >= greetings.length - 1) {
counter = 0;
} else {
counter += 1;
}
}, 1500);
|
// Is device mobile?
if (window.innerWidth >= 850) {
// Give video its source, will initiate load. Otherwise, it won't load anything and will be hidden by CSS.
// This is so the video doesn't burn mobile data on phones.
// Also it's a stylistic nightmare.
document.getElementsByTagName('video')[0].src = 'video.mp4';
// Parallax effect for video background.
window.onscroll = function() {
document.getElementsByClassName('video-container')[0].style.transform = 'translateY(' + document.body.scrollTop / 1.5 + 'px)';
};
}
var greetings = ['¡BIENVENIDOS!', '欢迎!', 'BIENVENUE !', 'WILLKOMMEN!', '!أهلاً و سهلاً', 'TERVETULOA!', 'CREOSO!', 'WELKOM!', 'VELKOMMEN!', 'BONVENON!', 'FI\'I!', 'ようこそ', 'SELAMAT DATANG!', 'BEM-VINDO!', 'PE\'EL!', 'ÜDVÖZLET!', 'ברוך הבא', 'स्वागतम्!', 'WELCOME!'];
var counter = 0;
setInterval(function() {
document.getElementById('welcome').textContent = greetings[counter];
if (counter >= greetings.length - 1) {
counter = 0;
} else {
counter += 1;
}
}, 1500);
|
Remove calls to `path.resolve` with no root directory
|
import path from 'path';
import {exec} from 'node-promise-es6/child-process';
async function cli(fixture, env = null) {
const child = await exec(
'jasmine',
{
cwd: path.resolve(__dirname, `../../fixtures/${fixture}`),
env: Object.assign({}, process.env, env)
}
);
return child.stdout;
}
describe('jasmine-es6', function() {
it('uses spec/support/jasmine.json if it exists', async function() {
expect(await cli('with_jasmine_json'))
.toContain('1 spec, 0 failures');
});
it('uses the default jasmine.json if spec/support/jasmine.json does not exist', async function() {
expect(await cli('without_jasmine_json'))
.toContain('1 spec, 0 failures');
});
it('allows configuring the jasmine.json path via environment variable', async function() {
expect(await cli(
'with_jasmine_json',
{
JASMINE_CONFIG_PATH: 'spec/support/jasmine2.json'
}
)).toContain('No specs found');
});
it('installs the async override by default', async function() {
const output = await cli('async_override');
expect(output).toContain('2 specs, 0 failures');
const [, duration] = output.match(/Finished in ([\d.]+) seconds/);
expect(Number(duration)).toBeGreaterThan(5);
}, 10000);
});
|
import path from 'path';
import {exec} from 'node-promise-es6/child-process';
async function cli(fixture, env = null) {
const child = await exec(
'jasmine',
{
cwd: path.resolve(`fixtures/${fixture}`),
env: Object.assign({}, process.env, env)
}
);
return child.stdout;
}
describe('jasmine-es6', function() {
it('uses spec/support/jasmine.json if it exists', async function() {
expect(await cli('with_jasmine_json'))
.toContain('1 spec, 0 failures');
});
it('uses the default jasmine.json if spec/support/jasmine.json does not exist', async function() {
expect(await cli('without_jasmine_json'))
.toContain('1 spec, 0 failures');
});
it('allows configuring the jasmine.json path via environment variable', async function() {
expect(await cli(
'with_jasmine_json',
{
JASMINE_CONFIG_PATH: 'spec/support/jasmine2.json'
}
)).toContain('No specs found');
});
it('installs the async override by default', async function() {
const output = await cli('async_override');
expect(output).toContain('2 specs, 0 failures');
const [, duration] = output.match(/Finished in ([\d.]+) seconds/);
expect(Number(duration)).toBeGreaterThan(5);
}, 10000);
});
|
ROO-17: Create Spring Web Flow add-on
|
package org.springframework.roo.addon.web.flow;
import org.springframework.roo.shell.CliAvailabilityIndicator;
import org.springframework.roo.shell.CliCommand;
import org.springframework.roo.shell.CliOption;
import org.springframework.roo.shell.CommandMarker;
import org.springframework.roo.support.lifecycle.ScopeDevelopmentShell;
import org.springframework.roo.support.util.Assert;
/**
* Commands for the 'install web flow' add-on to be used by the ROO shell.
*
* @author Stefan Schmidt
* @since 1.0
*
*/
@ScopeDevelopmentShell
public class WebFlowCommands implements CommandMarker {
private WebFlowOperations webFlowOperations;
public WebFlowCommands(WebFlowOperations webFlowOperations) {
Assert.notNull(webFlowOperations, "Jms operations required");
this.webFlowOperations = webFlowOperations;
}
/**
* @return true if the "install web flow" command is available at this moment
*/
@CliAvailabilityIndicator("install web flow")
public boolean isInstallWebFlowAvailable() {
return webFlowOperations.isInstallWebFlowAvailable();
}
@CliCommand(value="install web flow", help="Install Spring Web Flow configuration artifacts into your project")
public void installWebFlow(
@CliOption(key={"flowName"}, mandatory=false, help="The name for your web flow") String flowName) {
webFlowOperations.installWebFlow(flowName);
}
}
|
package org.springframework.roo.addon.web.flow;
import org.springframework.roo.shell.CliAvailabilityIndicator;
import org.springframework.roo.shell.CliCommand;
import org.springframework.roo.shell.CliOption;
import org.springframework.roo.shell.CommandMarker;
import org.springframework.roo.support.lifecycle.ScopeDevelopmentShell;
import org.springframework.roo.support.util.Assert;
/**
* Commands for the 'install web flow' add-on to be used by the ROO shell.
*
* @author Stefan Schmidt
* @since 1.0
*
*/
@ScopeDevelopmentShell
public class WebFlowCommands implements CommandMarker {
private WebFlowOperations webFlowOperations;
public WebFlowCommands(WebFlowOperations webFlowOperations) {
Assert.notNull(webFlowOperations, "Jms operations required");
this.webFlowOperations = webFlowOperations;
}
/**
* @return true if the "install web flow" command is available at this moment
*/
@CliAvailabilityIndicator("install web flow")
public boolean isInstallWebFlowAvailable() {
return webFlowOperations.isInstallWebFlowAvailable();
}
@CliCommand(value="install web flow", help="Install Spring Web Flow configuration artifacts into your project")
public void installWebFlow(
@CliOption(key={"flowName"}, mandatory=false, help="The name your web flow") String flowName) {
webFlowOperations.installWebFlow(flowName);
}
}
|
Check if epages6 settings are configured
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jonas Gratz
# Copyright (c) 2015 Jonas Gratz
#
# License: MIT
#
"""This module exports the PerlEpages6 plugin class."""
import sublime
from SublimeLinter.lint import Linter, util
class PerlEpages6(Linter):
"""Provides an interface to perl on an epages6 virtual machine from a local machine.
Requires a configured copy of the Epages6 plugin (see https://github.com/ePages-rnd/sublimetext-epages6)."""
def cmd(self):
if self.view.settings().get('ep6vm'):
return [self.executable_path, sublime.packages_path() + '/Epages6/ep6-tools.py', '--vm', self.view.settings().get('ep6vm')['vm'], '--lint', '--file', self.view.file_name(), '--user', 'root', '--password', 'qwert6', '--ignore-me', '@'];
else:
return []
executable = 'python3'
syntax = ('modernperl', 'perl')
regex = r'(?P<message>.+?) at .+? line (?P<line>\d+)(, near "(?P<near>.+?)")?'
error_stream = util.STREAM_BOTH
tempfile_suffix = 'pm'
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jonas Gratz
# Copyright (c) 2015 Jonas Gratz
#
# License: MIT
#
"""This module exports the PerlEpages6 plugin class."""
import sublime
from SublimeLinter.lint import Linter, util
class PerlEpages6(Linter):
"""Provides an interface to perl on an epages6 virtual machine from a local machine.
Requires a configured copy of the Epages6 plugin (see https://github.com/ePages-rnd/sublimetext-epages6)."""
def cmd(self):
return [self.executable_path, sublime.packages_path() + '/Epages6/ep6-tools.py', '--vm', self.view.settings().get('ep6vm')['vm'], '--lint', '--file', self.view.file_name(), '--user', 'root', '--password', 'qwert6', '--ignore-me', '@'];
executable = 'python3'
syntax = ('modernperl', 'perl')
regex = r'(?P<message>.+?) at .+? line (?P<line>\d+)(, near "(?P<near>.+?)")?'
error_stream = util.STREAM_BOTH
tempfile_suffix = 'pm'
|
Add pass timestamp rather than date object
|
/* eslint-disable import/prefer-default-export */
import React from 'react';
/**
* Hook to simplify the use of the native date picker component by unwrapping
* the callback event and firing the callback only when the date is changing,
* providing a value to conditionally render the native picker and a callback
* to set the render value.
*
* Pass an onChange callback which will be called when a new date is selected
* from the datepicker and not when cancelled or dismissed, with a JS date object
* rather than a nativeEvent.
*
* @param {Func} callback callback
* @return {Array} [
* datePickerIsOpen: boolean indicator whether the date picker is open.
* openDatePicker: Callback function to set the datePickerIsOpen to true.
* datePickerCallback: Wrapped callback, triggered on changing the date in the picker,
* returning a JS date object.
* ]
*/
export const useDatePicker = onChangeCallback => {
const [datePickerIsOpen, setDatePickerIsOpen] = React.useState(false);
const datePickerCallback = React.useCallback(
event => {
setDatePickerIsOpen(false);
const { type, nativeEvent } = event;
if (type === 'set' && onChangeCallback) {
const { timestamp } = nativeEvent;
onChangeCallback(timestamp);
}
},
[onChangeCallback]
);
const openDatePicker = React.useCallback(() => {
setDatePickerIsOpen(true);
}, []);
return [datePickerIsOpen, openDatePicker, datePickerCallback];
};
|
/* eslint-disable import/prefer-default-export */
import React from 'react';
/**
* Hook to simplify the use of the native date picker component by unwrapping
* the callback event and firing the callback only when the date is changing,
* providing a value to conditionally render the native picker and a callback
* to set the render value.
*
* Pass an onChange callback which will be called when a new date is selected
* from the datepicker and not when cancelled or dismissed, with a JS date object
* rather than a nativeEvent.
*
* @param {Func} callback callback
* @return {Array} [
* datePickerIsOpen: boolean indicator whether the date picker is open.
* openDatePicker: Callback function to set the datePickerIsOpen to true.
* datePickerCallback: Wrapped callback, triggered on changing the date in the picker,
* returning a JS date object.
* ]
*/
export const useDatePicker = onChangeCallback => {
const [datePickerIsOpen, setDatePickerIsOpen] = React.useState(false);
const datePickerCallback = React.useCallback(
event => {
setDatePickerIsOpen(false);
const { type, nativeEvent } = event;
if (type === 'set' && onChangeCallback) {
const { timestamp } = nativeEvent;
onChangeCallback(new Date(timestamp));
}
},
[onChangeCallback]
);
const openDatePicker = React.useCallback(() => {
setDatePickerIsOpen(true);
}, []);
return [datePickerIsOpen, openDatePicker, datePickerCallback];
};
|
Add FindValue() and WatchValue() fuzz calls to the mix.
git-svn-id: b33bab8abb5b18c12ee100cd7761ab452d00b2b0@140439 91177308-0d34-0410-b5e6-96231b3b80d8
|
"""
Fuzz tests an object after the default construction to make sure it does not crash lldb.
"""
import sys
import lldb
def fuzz_obj(obj):
obj.GetFrameID()
obj.GetPC()
obj.SetPC(0xffffffff)
obj.GetSP()
obj.GetFP()
obj.GetPCAddress()
obj.GetSymbolContext(0)
obj.GetModule()
obj.GetCompileUnit()
obj.GetFunction()
obj.GetSymbol()
obj.GetBlock()
obj.GetFunctionName()
obj.IsInlined()
obj.EvaluateExpression("x + y")
obj.EvaluateExpression("x + y", lldb.eDynamicCanRunTarget)
obj.GetFrameBlock()
obj.GetLineEntry()
obj.GetThread()
obj.Disassemble()
obj.GetVariables(True, True, True, True)
obj.GetVariables(True, True, True, False, lldb.eDynamicCanRunTarget)
obj.GetRegisters()
obj.FindVariable("my_var")
obj.FindVariable("my_var", lldb.eDynamicCanRunTarget)
obj.FindValue("your_var", lldb.eValueTypeVariableGlobal)
obj.FindValue("your_var", lldb.eValueTypeVariableStatic, lldb.eDynamicCanRunTarget)
obj.WatchValue("global_var", lldb.eValueTypeVariableGlobal, lldb.LLDB_WATCH_TYPE_READ)
obj.GetDescription(lldb.SBStream())
obj.Clear()
|
"""
Fuzz tests an object after the default construction to make sure it does not crash lldb.
"""
import sys
import lldb
def fuzz_obj(obj):
obj.GetFrameID()
obj.GetPC()
obj.SetPC(0xffffffff)
obj.GetSP()
obj.GetFP()
obj.GetPCAddress()
obj.GetSymbolContext(0)
obj.GetModule()
obj.GetCompileUnit()
obj.GetFunction()
obj.GetSymbol()
obj.GetBlock()
obj.GetFunctionName()
obj.IsInlined()
obj.EvaluateExpression("x + y")
obj.EvaluateExpression("x + y", lldb.eDynamicCanRunTarget)
obj.GetFrameBlock()
obj.GetLineEntry()
obj.GetThread()
obj.Disassemble()
obj.GetVariables(True, True, True, True)
obj.GetVariables(True, True, True, False, lldb.eDynamicCanRunTarget)
obj.GetRegisters()
obj.FindVariable("my_var")
obj.FindVariable("my_var", lldb.eDynamicCanRunTarget)
obj.GetDescription(lldb.SBStream())
obj.Clear()
|
Add ip forwarding and packet by packet sniffing code.
|
#!/etc/usr/python
from scapy.all import *
import sys
iface = "eth0"
filter = "ip"
#victim in this case is the initiator
VICTIM_IP = "192.168.1.121"
MY_IP = "192.168.1.154"
# gateway is the target
GATEWAY_IP = "192.168.1.171"
#VICTIM_MAC = "### don't want so show###"
MY_MAC = "08:00:27:7b:80:18"
#target mac address
GATEWAY_MAC = "08:00:27:24:08:34"
def handle_packet(packet):
if (packet[IP].dst == GATEWAY_IP) and (packet[Ether].dst == MY_MAC):
# we change the packet destination to the target machine
packet[Ether].dst = GATEWAY_MAC
# TODO: block iscsi packets with an if condition
if(packet[TCP]):
# sprintf("{Raw:%Raw.load%\n}")
print str(packet) + "\n"
sendp(packet)
print "A packet from " + packet[IP].src + " redirected!"
#printing redirected packets load
#sprintf("{Raw:%Raw.load%\n}")
sniff(prn=handle_packet, filter=filter, iface=iface, store=0)
|
#!/etc/usr/python
from scapy.all import *
import sys
iface = "eth0"
filter = "ip"
#victim in this case is the initiator
VICTIM_IP = "192.168.1.121"
MY_IP = "192.168.1.154"
# gateway is the target
GATEWAY_IP = "192.168.1.171"
#VICTIM_MAC = "### don't want so show###"
MY_MAC = "08:00:27:7B:80:18"
#target mac address
GATEWAY_MAC = "08:00:27:24:08:34"
def handle_packet(packet):
if (packet[IP].dst == GATEWAY_IP) and (packet[Ether].dst == MY_MAC):
# we change the packet destination to the target machine
packet[Ether].dst = GATEWAY_MAC
# TODO: block iscsi packets with an if condition
sendp(packet)
print "A packet from " + packet[IP].src + " redirected!"
#printing redirected packets load
sprintf("{Raw:%Raw.load%\n}")
sniff(prn=handle_packet, filter=filter, iface=iface, store=0)
|
Optimize twitter bootstrap nav-tabs javascript
|
// This is a manifest file that'll be compiled into including all the files listed below.
// Add new JavaScript/Coffee code in separate files in this directory and they'll automatically
// be included in the compiled file accessible from http://example.com/assets/application.js
// It's not advisable to add code directly here, but if you do, it'll appear at the bottom of the
// the compiled file.
//
//= require jquery
//= require jquery_ujs
//= require bootstrap.min
//
// Optimize remote forms
//
$("form[data-remote='true']").on('submit', function() {
$this = $(this)
$btn = $this.find("input[name='commit'][type='submit'], button[type='submit']")
.attr('data-loading-text', 'loading...')
.button('loading')
$this.on('ajax:complete', function() {
$btn.button('reset')
$this.off('ajax:complete')
})
});
//
// Optimize Nav-Tabs
//
$('.nav.nav-tabs.nav-js a').click(function (e) {
e.preventDefault()
// var scr = document.body.scrollTop;
window.location.hash = $(this).attr('href')
// document.body.scrollTop = scr;
$(this).tab('show')
})
if(window.location.hash) {
$('.nav-js a[href="'+window.location.hash+'"]').tab('show')
}
|
// This is a manifest file that'll be compiled into including all the files listed below.
// Add new JavaScript/Coffee code in separate files in this directory and they'll automatically
// be included in the compiled file accessible from http://example.com/assets/application.js
// It's not advisable to add code directly here, but if you do, it'll appear at the bottom of the
// the compiled file.
//
//= require jquery
//= require jquery_ujs
//= require bootstrap.min
$("form[data-remote='true']").on('submit', function() {
$this = $(this)
$btn = $this.find("input[name='commit'][type='submit'], button[type='submit']")
.attr('data-loading-text', 'loading...')
.button('loading')
$this.on('ajax:complete', function() {
$btn.button('reset')
$this.off('ajax:complete')
})
});
$('.nav.nav-tabs.nav-js a').click(function (e) {
e.preventDefault()
$(this).tab('show')
})
|
Store jobs with reduced redundancy on S3
|
'use stirct'
var opbeat = require('opbeat').start()
var uuid = require('node-uuid')
var AWS = require('aws-sdk')
var Printer = require('ipp-printer')
var port = process.env.PORT || 3000
var s3 = new AWS.S3()
var printer = new Printer({ name: 'printbin', port: port, zeroconf: false })
printer.on('job', function (job) {
var key = uuid.v4() + '.ps'
console.log('processing job %d (key: %s)', job.id, key)
job.on('end', function () {
console.log('done reading job %d (key: %s)', job.id, key)
})
var params = {
Bucket: 'watson-printbin',
ACL: 'public-read',
ContentType: 'application/postscript',
StorageClass: 'REDUCED_REDUNDANCY',
Key: key,
Body: job
}
s3.upload(params, function (err, data) {
if (err) return opbeat.captureError(err)
console.log('done uploading job %d (key: %s)', job.id, key)
})
})
|
'use stirct'
var opbeat = require('opbeat').start()
var uuid = require('node-uuid')
var AWS = require('aws-sdk')
var Printer = require('ipp-printer')
var port = process.env.PORT || 3000
var s3 = new AWS.S3()
var printer = new Printer({ name: 'printbin', port: port, zeroconf: false })
printer.on('job', function (job) {
var key = uuid.v4() + '.ps'
console.log('processing job %d (key: %s)', job.id, key)
job.on('end', function () {
console.log('done reading job %d (key: %s)', job.id, key)
})
var params = {
Bucket: 'watson-printbin',
ACL: 'public-read',
ContentType: 'application/postscript',
Key: key,
Body: job
}
s3.upload(params, function (err, data) {
if (err) return opbeat.captureError(err)
console.log('done uploading job %d (key: %s)', job.id, key)
})
})
|
Mark es_test decorator as nottest
Second try...
|
import json
from nose.plugins.attrib import attr
from nose.tools import nottest
class ElasticTestMixin(object):
def checkQuery(self, query, json_output, is_raw_query=False):
if is_raw_query:
raw_query = query
else:
raw_query = query.raw_query
msg = "Expected Query:\n{}\nGenerated Query:\n{}".format(
json.dumps(json_output, indent=4),
json.dumps(raw_query, indent=4),
)
# NOTE: This method thinks [a, b, c] != [b, c, a]
self.assertEqual(raw_query, json_output, msg=msg)
@nottest
def es_test(test):
"""Decorator for tagging ElasticSearch tests
:param test: A test class, method, or function.
"""
return attr(es_test=True)(test)
|
import json
from nose.plugins.attrib import attr
class ElasticTestMixin(object):
def checkQuery(self, query, json_output, is_raw_query=False):
if is_raw_query:
raw_query = query
else:
raw_query = query.raw_query
msg = "Expected Query:\n{}\nGenerated Query:\n{}".format(
json.dumps(json_output, indent=4),
json.dumps(raw_query, indent=4),
)
# NOTE: This method thinks [a, b, c] != [b, c, a]
self.assertEqual(raw_query, json_output, msg=msg)
def es_test(test):
"""Decorator for tagging ElasticSearch tests
:param test: A test class, method, or function.
"""
return attr(es_test=True)(test)
|
polly.py: Fix vcvarsall_dir for Visual Studio 2017
[skip ci]
|
# Copyright (c) 2014, Ruslan Baratov
# All rights reserved.
import detail.util
import os
import sys
def get(arch, vs_version):
vs_path_env = 'VS{}0COMNTOOLS'.format(vs_version)
vs_path = os.getenv(vs_path_env)
if not vs_path:
sys.exit(
'Environment variable {} is empty, '
'looks like Visual Studio {} is not installed'.format(
vs_path_env, vs_version
)
)
if vs_version == '15':
vcvarsall_dir = os.path.join(vs_path, '..', '..', 'VC', 'Auxiliary', 'Build')
else:
vcvarsall_dir = os.path.join(vs_path, '..', '..', 'VC')
if not os.path.isdir(vcvarsall_dir):
sys.exit(
'Directory `{}` not exists '
'({} environment variable)'.format(vcvarsall_dir, vs_path_env)
)
vcvarsall_path = os.path.join(vcvarsall_dir, 'vcvarsall.bat')
if not os.path.isfile(vcvarsall_path):
sys.exit(
'File vcvarsall.bat not found in directory '
'`{}` ({} environment variable)'.format(vcvarsall_dir, vs_path_env)
)
return detail.util.get_environment_from_batch_command([vcvarsall_path, arch])
|
# Copyright (c) 2014, Ruslan Baratov
# All rights reserved.
import detail.util
import os
import sys
def get(arch, vs_version):
vs_path_env = 'VS{}0COMNTOOLS'.format(vs_version)
vs_path = os.getenv(vs_path_env)
if not vs_path:
sys.exit(
'Environment variable {} is empty, '
'looks like Visual Studio {} is not installed'.format(
vs_path_env, vs_version
)
)
vcvarsall_dir = os.path.join(vs_path, '..', '..', 'VC')
if not os.path.isdir(vcvarsall_dir):
sys.exit(
'Directory `{}` not exists '
'({} environment variable)'.format(vcvarsall_dir, vs_path_env)
)
vcvarsall_path = os.path.join(vcvarsall_dir, 'vcvarsall.bat')
if not os.path.isfile(vcvarsall_path):
sys.exit(
'File vcvarsall.bat not found in directory '
'`{}` ({} environment variable)'.format(vcvarsall_dir, vs_path_env)
)
return detail.util.get_environment_from_batch_command([vcvarsall_path, arch])
|
[KARAF-1109] Update the archetypes itest to reflect the new karaf-command-archetype artifactId
git-svn-id: 71d8a689455c5fbb0f077bc40adcfc391e14cb9d@1214843 13f79535-47bb-0310-9956-ffa450edef68
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.karaf.archetypes;
import java.util.Properties;
public class CommandArchetypeTest extends AbstractArchetypeTest {
public void testCommand() throws Exception {
Properties commandArchetypeParameters = new Properties();
commandArchetypeParameters.setProperty("scope", "testscope");
commandArchetypeParameters.setProperty("command", "testcommand");
commandArchetypeParameters.setProperty("description", "testdescription");
testKarafArchetype("karaf-command-archetype", commandArchetypeParameters);
}
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.karaf.archetypes;
import java.util.Properties;
public class CommandArchetypeTest extends AbstractArchetypeTest {
public void testCommand() throws Exception {
Properties commandArchetypeParameters = new Properties();
commandArchetypeParameters.setProperty("scope", "testscope");
commandArchetypeParameters.setProperty("command", "testcommand");
commandArchetypeParameters.setProperty("description", "testdescription");
testKarafArchetype("archetypes-command", commandArchetypeParameters);
}
}
|
Replace magic number with symbolic constant
|
<?php
namespace Bauhaus\Http;
use Psr\Http\Message\ResponseInterface;
use Bauhaus\Http\Message;
use Bauhaus\Http\Response\Status;
use Bauhaus\Http\Response\StatusInterface;
class Response extends Message implements ResponseInterface
{
const DEFAULT_STATUS_CODE = 200;
private $status = null;
public function __construct(StatusInterface $status = null)
{
if (null === $status) {
$status = new Status(self::DEFAULT_STATUS_CODE);
}
$this->status = $status;
}
public function withStatus($code, $reasonPhrase = '')
{
$status = new Status($code, $reasonPhrase);
return new self($status);
}
public function getStatusCode()
{
return $this->status->code();
}
public function getReasonPhrase()
{
return $this->status->reasonPhrase();
}
}
|
<?php
namespace Bauhaus\Http;
use Psr\Http\Message\ResponseInterface;
use Bauhaus\Http\Message;
use Bauhaus\Http\Response\Status;
use Bauhaus\Http\Response\StatusInterface;
class Response extends Message implements ResponseInterface
{
private $status = null;
public function __construct(StatusInterface $status = null)
{
if (null === $status) {
$status = new Status(200);
}
$this->status = $status;
}
public function withStatus($code, $reasonPhrase = '')
{
$status = new Status($code, $reasonPhrase);
return new self($status);
}
public function getStatusCode()
{
return $this->status->code();
}
public function getReasonPhrase()
{
return $this->status->reasonPhrase();
}
}
|
Remove double $ from variable
|
<?php
/*
* The MIT License (MIT)
*
* Copyright (c) 2016 Bruno P. Kinoshita, Peter Florijn
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
$factory->define(Nestor\Entities\Projects::class, function (Faker\Generator $faker) {
return [
'id' => $faker->numberBetween(1, 1000),
'name' => $faker->name,
'description' => $faker->text,
'project_statuses_id' => $faker->numberBetween(1, 10),
'created_by' => $faker->name
];
});
|
<?php
/*
* The MIT License (MIT)
*
* Copyright (c) 2016 Bruno P. Kinoshita, Peter Florijn
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
$factory->define(Nestor\Entities\Projects::class, function (Faker\Generator $faker) {
return [
'id' => $faker->numberBetween(1, 1000),
'name' => $faker->name,
'description' => $faker->text,
'project_statuses_id' => $$faker->numberBetween(1, 10),
'created_by' => $faker->name
];
});
|
Simplify f-string to remove cast
|
import subprocess
from pathlib import Path
po_path: Path = Path(__file__).resolve().parent
def run_babel(command: str, input: Path, output_file: Path, locale: str):
subprocess.run(
[
"pybabel",
command,
f"--input={input}",
f"--output-file={output_file}",
f"--locale={locale}",
"--domain=gaphor",
]
)
def update_po_files():
pot_path = po_path / "gaphor.pot"
for path in (path for path in po_path.iterdir() if path.suffix == ".po"):
run_babel("update", pot_path, path, path.stem)
def compile_mo_files():
for path in (path for path in po_path.iterdir() if path.suffix == ".po"):
mo_path = po_path.parent / "locale" / path.stem / "LC_MESSAGES" / "gaphor.mo"
mo_path.parent.mkdir(parents=True, exist_ok=True)
run_babel("compile", path, mo_path, path.stem)
if __name__ == "__main__":
update_po_files()
compile_mo_files()
|
import subprocess
from pathlib import Path
po_path: Path = Path(__file__).resolve().parent
def run_babel(command: str, input: Path, output_file: Path, locale: str):
subprocess.run(
[
"pybabel",
command,
f"--input={str(input)}",
f"--output-file={str(output_file)}",
f"--locale={str(locale)}",
"--domain=gaphor",
]
)
def update_po_files():
pot_path = po_path / "gaphor.pot"
for path in (path for path in po_path.iterdir() if path.suffix == ".po"):
run_babel("update", pot_path, path, path.stem)
def compile_mo_files():
for path in (path for path in po_path.iterdir() if path.suffix == ".po"):
mo_path = po_path.parent / "locale" / path.stem / "LC_MESSAGES" / "gaphor.mo"
mo_path.parent.mkdir(parents=True, exist_ok=True)
run_babel("compile", path, mo_path, path.stem)
if __name__ == "__main__":
update_po_files()
compile_mo_files()
|
Add message for system-generated definitions.
|
package org.reldb.dbrowser.ui.content.rel.operator;
import org.eclipse.swt.graphics.Image;
import org.reldb.dbrowser.ui.content.rel.DbTreeAction;
import org.reldb.dbrowser.ui.content.rel.DbTreeItem;
import org.reldb.dbrowser.ui.content.rel.NaiveShowTab;
import org.reldb.dbrowser.ui.content.rel.RelPanel;
import org.reldb.rel.client.Tuple;
import org.reldb.rel.client.Tuples;
public class OperatorPlayer extends DbTreeAction {
public OperatorPlayer(RelPanel relPanel) {
super(relPanel);
}
@Override
public void go(DbTreeItem item, Image image) {
Tuples tuples = relPanel.getConnection().getTuples("(EXTEND (sys.Operators UNGROUP Implementations): {opName := Signature || IF ReturnsType <> '' THEN ' RETURNS ' || ReturnsType ELSE '' END IF} WHERE opName = '" + item.getName() + "') {Definition}");
String definition = "???";
if (tuples != null)
for (Tuple tuple: tuples)
definition = tuple.getAttributeValue("Definition").toString();
if (definition.trim().length() == 0)
definition = "<System-generated definition is unavailable.>";
NaiveShowTab typetab = new NaiveShowTab(relPanel, item, definition);
typetab.setImage(image);
relPanel.getTabFolder().setSelection(typetab);
}
}
|
package org.reldb.dbrowser.ui.content.rel.operator;
import org.eclipse.swt.graphics.Image;
import org.reldb.dbrowser.ui.content.rel.DbTreeAction;
import org.reldb.dbrowser.ui.content.rel.DbTreeItem;
import org.reldb.dbrowser.ui.content.rel.NaiveShowTab;
import org.reldb.dbrowser.ui.content.rel.RelPanel;
import org.reldb.rel.client.Tuple;
import org.reldb.rel.client.Tuples;
public class OperatorPlayer extends DbTreeAction {
public OperatorPlayer(RelPanel relPanel) {
super(relPanel);
}
@Override
public void go(DbTreeItem item, Image image) {
Tuples tuples = relPanel.getConnection().getTuples("(EXTEND (sys.Operators UNGROUP Implementations): {opName := Signature || IF ReturnsType <> '' THEN ' RETURNS ' || ReturnsType ELSE '' END IF} WHERE opName = '" + item.getName() + "') {Definition}");
String definition = "???";
if (tuples != null)
for (Tuple tuple: tuples)
definition = tuple.getAttributeValue("Definition").toString();
NaiveShowTab typetab = new NaiveShowTab(relPanel, item, definition);
typetab.setImage(image);
relPanel.getTabFolder().setSelection(typetab);
}
}
|
Use Fira Code again in Hyper
|
module.exports = {
config: {
fontSize: 12,
fontFamily: '"Fira Code", Monoid, Menlo, "DejaVu Sans Mono", "Lucida Console", monospace, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol"',
css: `
header {
box-shadow: 0px 2px 20px rgba(0, 0, 0, 0.17);
}
.hyperterm_main {
border-color: transparent !important;
}
`,
termCSS: `
x-row > span {
}
`,
padding: '0'
},
plugins: [
'hyperterm-adventurous',
'hyperterm-title',
'hyperterm-focus-reporting'
],
// `~/.hyperterm_plugins/local/`
localPlugins: []
};
|
module.exports = {
config: {
fontSize: 13,
fontFamily: '"Operator Mono", "Fira Code", Monoid, Menlo, "DejaVu Sans Mono", "Lucida Console", monospace',
css: `
header {
box-shadow: 0px 2px 20px rgba(0, 0, 0, 0.17);
}
.hyperterm_main {
border-color: transparent !important;
}
`,
termCSS: `
x-row > span {
}
`,
padding: '0'
},
plugins: [
'hyperterm-adventurous',
'hyperterm-title',
'hyperterm-focus-reporting'
],
// `~/.hyperterm_plugins/local/`
localPlugins: []
};
|
Remove borough from locality lookup
We may have gotten overeager adding borough everywhere, it's definitely
below locality in the heirarchy.
|
/* Look up which admin fields should be populated for a record in a given layer.
*
* The logic is: look up eveything above in the WOF heirarchy, ignoring things like
* 'dependency'
*
* Note: this filtering really only matters for geonames currently, since OSM and OA
* consist entirely of venue and address records, which should have all fields
* looked up. WOF documents use the actual WOF heirarchy to fill in admin values,
* so they also won't be affected by this.
*/
function getAdminLayers(layer) {
switch (layer) {
case 'country':
return ['country'];
case 'region':
return ['country', 'macroregion', 'region'];
case 'county':
return ['country', 'macroregion', 'region', 'macrocounty', 'county'];
case 'locality':
return ['country', 'macroregion', 'region', 'macrocounty', 'county', 'locality'];
default:
return undefined;//undefined means use all layers as normal
}
}
module.exports = getAdminLayers;
|
/* Look up which admin fields should be populated for a record in a given layer.
*
* The logic is: look up eveything above in the WOF heirarchy, ignoring things like
* 'dependency'
*
* Note: this filtering really only matters for geonames currently, since OSM and OA
* consist entirely of venue and address records, which should have all fields
* looked up. WOF documents use the actual WOF heirarchy to fill in admin values,
* so they also won't be affected by this.
*/
function getAdminLayers(layer) {
switch (layer) {
case 'country':
return ['country'];
case 'region':
return ['country', 'macroregion', 'region'];
case 'county':
return ['country', 'macroregion', 'region', 'macrocounty', 'county'];
case 'locality':
return ['country', 'macroregion', 'region', 'macrocounty', 'county', 'locality', 'borough'];
default:
return undefined;//undefined means use all layers as normal
}
}
module.exports = getAdminLayers;
|
Update test for new column naming rules. Remove '' as a name and call column 'C1'
|
import sys
sys.path.insert(1, "../../../")
import h2o
def cupMediumGBM(ip,port):
# Connect to h2o
h2o.init(ip,port)
train = h2o.import_frame(path=h2o.locate("bigdata/laptop/usecases/cup98LRN_z.csv"))
test = h2o.import_frame(path=h2o.locate("bigdata/laptop/usecases/cup98VAL_z.csv"))
train["TARGET_B"] = train["TARGET_B"].asfactor()
# Train H2O GBM Model:
train_cols = train.names()
for c in ['C1', "TARGET_D", "TARGET_B", "CONTROLN"]:
train_cols.remove(c)
model = h2o.gbm(x=train[train_cols], y=train["TARGET_B"], distribution = "bernoulli", ntrees = 5)
if __name__ == "__main__":
h2o.run_test(sys.argv, cupMediumGBM)
|
import sys
sys.path.insert(1, "../../../")
import h2o
def cupMediumGBM(ip,port):
# Connect to h2o
h2o.init(ip,port)
train = h2o.import_frame(path=h2o.locate("bigdata/laptop/usecases/cup98LRN_z.csv"))
test = h2o.import_frame(path=h2o.locate("bigdata/laptop/usecases/cup98VAL_z.csv"))
train["TARGET_B"] = train["TARGET_B"].asfactor()
# Train H2O GBM Model:
train_cols = train.names()
for c in ['', "TARGET_D", "TARGET_B", "CONTROLN"]:
train_cols.remove(c)
model = h2o.gbm(x=train[train_cols], y=train["TARGET_B"], distribution = "bernoulli", ntrees = 5)
if __name__ == "__main__":
h2o.run_test(sys.argv, cupMediumGBM)
|
Fix CSS getting required in AJAX request
|
<?php
namespace Concrete\Package\KintDebug;
use Concrete\Core\Package\Package;
use View;
defined('C5_EXECUTE') or die(_("Access Denied."));
require_once __DIR__ . '/vendor/autoload.php';
class Controller extends Package {
protected $pkgHandle = 'kint_debug';
protected $appVersionRequired = '5.7.0.4';
protected $pkgVersion = '0.9.1';
public function getPackageDescription() {
return t('Add Kint debugging tools');
}
public function getPackageName() {
return t('Kint Debug');
}
public function on_start() {
$al = \Concrete\Core\Asset\AssetList::getInstance();
$al->register('css', 'kint_debug/css', 'css/debug.css', array(), $this->pkgHandle);
// Never include the css in ajax responses
if(!$this->isAjaxRequest()) {
View::getInstance()->requireAsset('css', 'kint_debug/css');
}
}
/**
* Check, if possible, that this execution was triggered by an AJAX request.
* @return bool
*/
protected function isAjaxRequest()
{
return !empty($_SERVER['HTTP_X_REQUESTED_WITH']) &&
strtolower($_SERVER['HTTP_X_REQUESTED_WITH']) === 'xmlhttprequest';
}
}
|
<?php
namespace Concrete\Package\KintDebug;
use Concrete\Core\Package\Package;
use View;
defined('C5_EXECUTE') or die(t("Access Denied."));
require_once __DIR__ . '/vendor/autoload.php';
class Controller extends Package {
protected $pkgHandle = 'kint_debug';
protected $appVersionRequired = '5.7.0.4';
protected $pkgVersion = '0.9.1';
public function getPackageDescription() {
return t('Add Kint debugging tools');
}
public function getPackageName() {
return t('Kint Debug');
}
public function on_start() {
$al = \Concrete\Core\Asset\AssetList::getInstance();
$al->register('css', $this->pkgHandle . '/css', 'css/debug.css', array(), $this->pkgHandle);
View::getInstance()->requireAsset('css', $this->pkgHandle . '/css');
}
}
|
Allow Revolut username to be minimum 3 characters
|
/*
* This file is part of Bisq.
*
* Bisq is free software: you can redistribute it and/or modify it
* under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or (at
* your option) any later version.
*
* Bisq is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public
* License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Bisq. If not, see <http://www.gnu.org/licenses/>.
*/
package bisq.desktop.util.validation;
public final class RevolutValidator extends LengthValidator {
public RevolutValidator() {
// Not sure what are requirements for Revolut user names
// Please keep in mind that even we force users to set user name at startup we should handle also the case
// that the old accountID as phone number or email is displayed at the username text field and we do not
// want to break validation in those cases. So being too strict on the validators might cause more troubles
// as its worth...
// UPDATE 04/2021: Revolut usernames could be edited (3-16 characters, lowercase a-z and numbers only)
super(3, 100);
}
public ValidationResult validate(String input) {
return super.validate(input);
}
}
|
/*
* This file is part of Bisq.
*
* Bisq is free software: you can redistribute it and/or modify it
* under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or (at
* your option) any later version.
*
* Bisq is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public
* License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Bisq. If not, see <http://www.gnu.org/licenses/>.
*/
package bisq.desktop.util.validation;
public final class RevolutValidator extends LengthValidator {
public RevolutValidator() {
// Not sure what are requirements for Revolut user names
// Please keep in mind that even we force users to set user name at startup we should handle also the case
// that the old accountID as phone number or email is displayed at the username text field and we do not
// want to break validation in those cases. So being too strict on the validators might cause more troubles
// as its worth...
super(5, 100);
}
public ValidationResult validate(String input) {
return super.validate(input);
}
}
|
Fix spelling of 'IPv4' and 'IPv6' (cosmetic)
|
ACTION_CREATE = 0
ACTION_VIEW = 1
ACTION_UPDATE = 2
ACTION_DELETE = 3
ACTIONS = {
ACTION_CREATE: 'Create',
ACTION_VIEW: 'View',
ACTION_UPDATE: 'Update',
ACTION_DELETE: 'Delete',
}
LEVEL_GUEST = 0
LEVEL_USER = 1
LEVEL_ADMIN = 2
LEVELS = {
LEVEL_GUEST: 'Guest',
LEVEL_USER: 'User',
LEVEL_ADMIN: 'Admin',
}
IP_TYPE_4 = '4'
IP_TYPE_6 = '6'
IP_TYPES = {
IP_TYPE_4: 'IPv4',
IP_TYPE_6: 'IPv6'
}
DHCP_OBJECTS = ("workgroup", "vrf", "vlan", "site", "range", "network",
"static_interface", "dynamic_interface", "workgroup_kv",
"vrf_kv", "vlan_kv", "site_kv", "range_kv", "network_kv",
"static_interface_kv", "dynamic_interface_kv",)
DNS_OBJECTS = ("address_record", "cname", "domain", "mx", "nameserver", "ptr",
"soa", "srv", "sshfp", "txt", "view",)
CORE_OBJECTS = ("ctnr_users", "ctnr", "user", "system")
|
ACTION_CREATE = 0
ACTION_VIEW = 1
ACTION_UPDATE = 2
ACTION_DELETE = 3
ACTIONS = {
ACTION_CREATE: 'Create',
ACTION_VIEW: 'View',
ACTION_UPDATE: 'Update',
ACTION_DELETE: 'Delete',
}
LEVEL_GUEST = 0
LEVEL_USER = 1
LEVEL_ADMIN = 2
LEVELS = {
LEVEL_GUEST: 'Guest',
LEVEL_USER: 'User',
LEVEL_ADMIN: 'Admin',
}
IP_TYPE_4 = '4'
IP_TYPE_6 = '6'
IP_TYPES = {
IP_TYPE_4: 'ipv4',
IP_TYPE_6: 'ipv6'
}
DHCP_OBJECTS = ("workgroup", "vrf", "vlan", "site", "range", "network",
"static_interface", "dynamic_interface", "workgroup_kv",
"vrf_kv", "vlan_kv", "site_kv", "range_kv", "network_kv",
"static_interface_kv", "dynamic_interface_kv",)
DNS_OBJECTS = ("address_record", "cname", "domain", "mx", "nameserver", "ptr",
"soa", "srv", "sshfp", "txt", "view",)
CORE_OBJECTS = ("ctnr_users", "ctnr", "user", "system")
|
Add checking if resctrl path exists
Signed-off-by: Katarzyna Kujawa <6d81612c80214d850c10c80be3c4dcdb2935414e@intel.com>
|
// +build linux
// Copyright 2020 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Manager of resctrl for containers.
package resctrl
import (
"os"
"github.com/google/cadvisor/stats"
"github.com/opencontainers/runc/libcontainer/intelrdt"
)
type manager struct {
id string
stats.NoopDestroy
}
func (m manager) GetCollector(resctrlPath string) (stats.Collector, error) {
if _, err := os.Stat(resctrlPath); err != nil {
return &stats.NoopCollector{}, err
}
collector := newCollector(m.id, resctrlPath)
return collector, nil
}
func NewManager(id string) (stats.Manager, error) {
if intelrdt.IsMBMEnabled() || intelrdt.IsCMTEnabled() {
return &manager{id: id}, nil
}
return &stats.NoopManager{}, nil
}
|
// +build linux
// Copyright 2020 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Manager of resctrl for containers.
package resctrl
import (
"github.com/google/cadvisor/stats"
"github.com/opencontainers/runc/libcontainer/intelrdt"
)
type manager struct {
id string
stats.NoopDestroy
}
func (m manager) GetCollector(resctrlPath string) (stats.Collector, error) {
collector := newCollector(m.id, resctrlPath)
return collector, nil
}
func NewManager(id string) (stats.Manager, error) {
if intelrdt.IsMBMEnabled() || intelrdt.IsCMTEnabled() {
return &manager{id: id}, nil
}
return &stats.NoopManager{}, nil
}
|
Replace 'assert session != null' with Assert.notnull(Object, String)
|
/*
* Copyright 2010 The myBatis Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mybatis.spring;
import org.apache.ibatis.session.SqlSession;
import org.springframework.transaction.support.ResourceHolderSupport;
import org.springframework.util.Assert;
/**
*
* @version $Id$
*/
public final class SqlSessionHolder extends ResourceHolderSupport {
private final SqlSession sqlSession;
public SqlSessionHolder(SqlSession sqlSession) {
Assert.notNull(sqlSession, "SqlSession must not be null");
this.sqlSession = sqlSession;
}
public SqlSession getSqlSession() {
return sqlSession;
}
}
|
/*
* Copyright 2010 The myBatis Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mybatis.spring;
import org.apache.ibatis.session.SqlSession;
import org.springframework.transaction.support.ResourceHolderSupport;
/**
*
* @version $Id$
*/
public final class SqlSessionHolder extends ResourceHolderSupport {
private final SqlSession session;
public SqlSessionHolder(SqlSession session) {
assert session != null;
this.session = session;
}
public SqlSession getSqlSession() {
return session;
}
}
|
Delete images instead of printing
|
#!/usr/bin/env python
from collections import defaultdict
import subprocess
import os
KEEP_LAST_VERSIONS = os.environ.get('KEEP_LAST_VERSIONS', 4)
def find_obsolete_images(images):
for image_name, versions in images.items():
if len(versions) > KEEP_LAST_VERSIONS:
obsolete_versions = sorted(versions, reverse=True)[4:]
for version in obsolete_versions:
yield '{}:{}'.format(image_name, version)
def parse_images(lines):
images = defaultdict(list)
for line in lines:
try:
image_name, version = line.split(' ')
version_num = int(version.replace('v', ''))
images[image_name].append(version_num)
except ValueError:
pass
return images
def remove_image(image_name):
subprocess.check_call(['docker', 'rm', image_name])
def all_images():
output = subprocess \
.check_output(['./docker_image_versions.sh'], shell=True) \
.decode('utf-8')
lines = output.split('\n')
return parse_images(lines)
if __name__ == '__main__':
images = all_images()
for image_name in find_obsolete_images(images):
remove_image(image_name)
|
#!/usr/bin/env python
from collections import defaultdict
import subprocess
import os
KEEP_LAST_VERSIONS = os.environ.get('KEEP_LAST_VERSIONS', 4)
def find_obsolete_images(images):
for image_name, versions in images.items():
if len(versions) > KEEP_LAST_VERSIONS:
obsolete_versions = sorted(versions, reverse=True)[4:]
for version in obsolete_versions:
yield '{}:{}'.format(image_name, version)
def parse_images(lines):
images = defaultdict(list)
for line in lines:
try:
image_name, version = line.split(' ')
version_num = float(version.replace('v', ''))
images[image_name].append(version_num)
except ValueError:
pass
return images
def remove_image(image_name):
# subprocess.call(['docker', 'rm', image_name])
print('docker rm ' + image_name)
def all_images():
output = subprocess \
.check_output(['./docker_image_versions.sh'], shell=True) \
.decode('utf-8')
lines = output.split('\n')
return parse_images(lines)
if __name__ == '__main__':
images = all_images()
for image_name in find_obsolete_images(images):
remove_image(image_name)
|
Fix a Flow type annotation in ValidationError
|
/**
* Node.js API Starter Kit (https://reactstarter.com/nodejs)
*
* Copyright © 2016-present Kriasoft, LLC. All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE.txt file in the root directory of this source tree.
*/
/* @flow */
// TODO: Log the error to Google Stackdriver, Rollbar etc.
function report(error: Error) {
// eslint-disable-next-line no-console
console.error(error);
}
export class ValidationError extends Error {
code = 400;
state: any;
constructor(errors: Array<{ key: string, message: string }>) {
super('The request is invalid.');
this.state = errors.reduce((result, error) => {
if (Object.prototype.hasOwnProperty.call(result, error.key)) {
result[error.key].push(error.message);
} else {
Object.defineProperty(result, error.key, {
value: [error.message],
enumerable: true,
});
}
return result;
}, {});
}
}
export class UnauthorizedError extends Error {
code = 401;
message = this.message || 'Anonymous access is denied.';
}
export class ForbiddenError extends Error {
code = 403;
message = this.message || 'Access is denied.';
}
export default { report };
|
/**
* Node.js API Starter Kit (https://reactstarter.com/nodejs)
*
* Copyright © 2016-present Kriasoft, LLC. All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE.txt file in the root directory of this source tree.
*/
/* @flow */
// TODO: Log the error to Google Stackdriver, Rollbar etc.
function report(error: Error) {
// eslint-disable-next-line no-console
console.error(error);
}
export class ValidationError extends Error {
code = 400;
state: any;
constructor(errors: []) {
super('The request is invalid.');
this.state = errors.reduce((result, error) => {
if (Object.prototype.hasOwnProperty.call(result, error.key)) {
result[error.key].push(error.message);
} else {
Object.defineProperty(result, error.key, {
value: [error.message],
enumerable: true,
});
}
return result;
}, {});
}
}
export class UnauthorizedError extends Error {
code = 401;
message = this.message || 'Anonymous access is denied.';
}
export class ForbiddenError extends Error {
code = 403;
message = this.message || 'Access is denied.';
}
export default { report };
|
Load lib extensions after loading middleware
|
var Protos = require('../');
Protos.bootstrap(__dirname, {
// Server configuration
server: {
host: 'localhost',
port: 8080,
multiProcess: false,
stayUp: false
},
// Application environments
environments: {
default: 'development',
development: function(app) {
app.debugLog = false;
}
},
// Application events
events: {
init: function(app) {
// Load middleware
app.use('logger');
app.use('markdown');
app.use('body_parser');
app.use('cookie_parser');
app.use('static_server');
// Load extensions in lib/
app.libExtensions();
}
}
});
module.exports = protos.app;
|
var Protos = require('../');
Protos.bootstrap(__dirname, {
// Server configuration
server: {
host: 'localhost',
port: 8080,
multiProcess: false,
stayUp: false
},
// Application environments
environments: {
default: 'development',
development: function(app) {
app.debugLog = false;
}
},
// Application events
events: {
init: function(app) {
// Load extensions in lib/
app.libExtensions();
// Load middleware
app.use('logger');
app.use('markdown');
app.use('body_parser');
app.use('cookie_parser');
app.use('static_server');
}
}
});
module.exports = protos.app;
|
Use field.to_python to do django type conversions on the field before checking if dirty.
This solves issues where you might have a decimal field that you write a string to, eg:
>>> m = MyModel.objects.get(id=1)
>>> m.my_decimal_field
Decimal('1.00')
>>> m.my_decimal_field = u'1.00' # from a form or something
>>> m.is_dirty() # currently evaluates to True, should evaluate to False
False
This pull request could probably use some unit testing, but it should be safe as the base class for django fields defines to_python as:
def to_python(self, value):
return value
So, any field type that does not have an explicit to_python method will behave as before this change.
|
# Adapted from http://stackoverflow.com/questions/110803/dirty-fields-in-django
from django.db.models.signals import post_save
class DirtyFieldsMixin(object):
def __init__(self, *args, **kwargs):
super(DirtyFieldsMixin, self).__init__(*args, **kwargs)
post_save.connect(reset_state, sender=self.__class__,
dispatch_uid='%s-DirtyFieldsMixin-sweeper' % self.__class__.__name__)
reset_state(sender=self.__class__, instance=self)
def _as_dict(self):
return dict([(f.name, f.to_python(getattr(self, f.name))) for f in self._meta.local_fields if not f.rel])
def get_dirty_fields(self):
new_state = self._as_dict()
return dict([(key, value) for key, value in self._original_state.iteritems() if value != new_state[key]])
def is_dirty(self):
# in order to be dirty we need to have been saved at least once, so we
# check for a primary key and we need our dirty fields to not be empty
if not self.pk:
return True
return {} != self.get_dirty_fields()
def reset_state(sender, instance, **kwargs):
instance._original_state = instance._as_dict()
|
# Adapted from http://stackoverflow.com/questions/110803/dirty-fields-in-django
from django.db.models.signals import post_save
class DirtyFieldsMixin(object):
def __init__(self, *args, **kwargs):
super(DirtyFieldsMixin, self).__init__(*args, **kwargs)
post_save.connect(reset_state, sender=self.__class__,
dispatch_uid='%s-DirtyFieldsMixin-sweeper' % self.__class__.__name__)
reset_state(sender=self.__class__, instance=self)
def _as_dict(self):
return dict([(f.name, getattr(self, f.name)) for f in self._meta.local_fields if not f.rel])
def get_dirty_fields(self):
new_state = self._as_dict()
return dict([(key, value) for key, value in self._original_state.iteritems() if value != new_state[key]])
def is_dirty(self):
# in order to be dirty we need to have been saved at least once, so we
# check for a primary key and we need our dirty fields to not be empty
if not self.pk:
return True
return {} != self.get_dirty_fields()
def reset_state(sender, instance, **kwargs):
instance._original_state = instance._as_dict()
|
Throw a specific exception instead of a generic one
|
package me.devsaki.hentoid.parsers;
import android.webkit.URLUtil;
import java.util.List;
import me.devsaki.hentoid.database.domains.Content;
import me.devsaki.hentoid.database.domains.ImageFile;
import timber.log.Timber;
public abstract class BaseParser implements ImageListParser {
private int currentStep;
private int maxSteps;
protected abstract List<String> parseImages(Content content) throws Exception;
public List<ImageFile> parseImageList(Content content) throws Exception {
String readerUrl = content.getReaderUrl();
if (!URLUtil.isValidUrl(readerUrl))
throw new IllegalArgumentException("Invalid gallery URL : " + readerUrl);
Timber.d("Gallery URL: %s", readerUrl);
List<String> imgUrls = parseImages(content);
List<ImageFile> images = ParseHelper.urlsToImageFiles(imgUrls);
Timber.d("%s", images);
return images;
}
void progressStart(int maxSteps) {
currentStep = 0;
this.maxSteps = maxSteps;
ParseHelper.signalProgress(currentStep, maxSteps);
}
void progressPlus() {
ParseHelper.signalProgress(++currentStep, maxSteps);
}
void progressComplete() {
ParseHelper.signalProgress(maxSteps, maxSteps);
}
}
|
package me.devsaki.hentoid.parsers;
import android.webkit.URLUtil;
import java.util.List;
import me.devsaki.hentoid.database.domains.Content;
import me.devsaki.hentoid.database.domains.ImageFile;
import timber.log.Timber;
public abstract class BaseParser implements ImageListParser {
private int currentStep;
private int maxSteps;
protected abstract List<String> parseImages(Content content) throws Exception;
public List<ImageFile> parseImageList(Content content) throws Exception {
String readerUrl = content.getReaderUrl();
if (!URLUtil.isValidUrl(readerUrl)) {
throw new Exception("Invalid gallery URL : " + readerUrl);
}
Timber.d("Gallery URL: %s", readerUrl);
List<String> imgUrls = parseImages(content);
List<ImageFile> images = ParseHelper.urlsToImageFiles(imgUrls);
Timber.d("%s", images);
return images;
}
void progressStart(int maxSteps) {
currentStep = 0;
this.maxSteps = maxSteps;
ParseHelper.signalProgress(currentStep, maxSteps);
}
void progressPlus() {
ParseHelper.signalProgress(++currentStep, maxSteps);
}
void progressComplete() {
ParseHelper.signalProgress(maxSteps, maxSteps);
}
}
|
Use in every case a hash for addon name
|
"""Util addons functions."""
import hashlib
import pathlib
import re
RE_SLUGIFY = re.compile(r'[^a-z0-9_]+')
RE_SHA1 = re.compile(r"[a-f0-9]{40}")
def get_hash_from_repository(repo):
"""Generate a hash from repository."""
key = repo.lower().encode()
return hashlib.sha1(key).hexdigest()[:8]
def extract_hash_from_path(base_path, options_path):
"""Extract repo id from path."""
base_dir = pathlib.PurePosixPath(base_path).parts[-1]
dirlist = iter(pathlib.PurePosixPath(options_path).parts)
for obj in dirlist:
if obj != base_dir:
continue
repo_dir = next(dirlist)
if not RE_SHA1.match(repo_dir):
return get_hash_from_repository(repo_dir)
return repo_dir
|
"""Util addons functions."""
import hashlib
import pathlib
import re
import unicodedata
RE_SLUGIFY = re.compile(r'[^a-z0-9_]+')
def slugify(text):
"""Slugify a given text."""
text = unicodedata.normalize('NFKD', text)
text = text.lower()
text = text.replace(" ", "_")
text = RE_SLUGIFY.sub("", text)
return text
def get_hash_from_repository(repo):
"""Generate a hash from repository."""
key = repo.lower().encode()
return hashlib.sha1(key).hexdigest()[:8]
def extract_hash_from_path(base_path, options_path):
"""Extract repo id from path."""
base_dir = pathlib.PurePosixPath(base_path).parts[-1]
dirlist = iter(pathlib.PurePosixPath(options_path).parts)
for obj in dirlist:
if obj != base_dir:
continue
return slugify(next(dirlist))
|
Use click.echo() for python 2.7 compatibility
|
import logging
from regparser.tree.depth import optional_rules
from regparser.tree.depth.derive import derive_depths
import click
logger = logging.getLogger(__name__)
@click.command()
@click.argument('markers', type=click.STRING, required=True)
def outline_depths(markers) -> None:
"""
Infer an outline's structure.
Return a list of outline depths for a given list of space-separated markers.
"""
# Input is space-separated.
marker_list = markers.split(' ')
all_solutions = derive_depths(
marker_list,
[optional_rules.limit_sequence_gap(1)]
)
depths = {tuple(str(a.depth) for a in s) for s in all_solutions}.pop()
# Expected output is space-separated.
formatted_output = ' '.join(depths)
click.echo(formatted_output)
if __name__ == '__main__':
"""Enable running this command directly. E.g.,
`$ python regparser/commands/outline_depths.py`. This can save 1.5 seconds
or more of startup time.
"""
outline_depths()
|
import logging
from regparser.tree.depth import optional_rules
from regparser.tree.depth.derive import derive_depths
import click
logger = logging.getLogger(__name__)
@click.command()
@click.argument('markers', type=click.STRING, required=True)
def outline_depths(markers) -> None:
"""
Infer an outline's structure.
Return a list of outline depths for a given list of space-separated markers.
"""
# Input is space-separated.
marker_list = markers.split(' ')
all_solutions = derive_depths(
marker_list,
[optional_rules.limit_sequence_gap(1)]
)
depths = {tuple(str(a.depth) for a in s) for s in all_solutions}.pop()
# Expected output is space-separated.
formatted_output = ' '.join(depths)
print(formatted_output)
if __name__ == '__main__':
"""Enable running this command directly. E.g.,
`$ python regparser/commands/outline_depths.py`. This can save 1.5 seconds
or more of startup time.
"""
outline_depths()
|
Fix for intel routing changes
|
// ==UserScript==
// @id iitc-plugin-console@hansolo669
// @name IITC plugin: console
// @category Debug
// @version 0.0.2
// @namespace https://github.com/hansolo669/iitc-tweaks
// @updateURL https://iitc.reallyawesomedomain.com/console.meta.js
// @downloadURL https://iitc.reallyawesomedomain.com/console.user.js
// @description Utility to pipe the standard console back into IITC and esailly eval snippets
// @include https://*.ingress.com/intel*
// @include http://*.ingress.com/intel*
// @match https://*.ingress.com/intel*
// @match http://*.ingress.com/intel*
// @include https://*.ingress.com/mission/*
// @include http://*.ingress.com/mission/*
// @match https://*.ingress.com/mission/*
// @match http://*.ingress.com/mission/*
// @grant none
// ==/UserScript==
|
// ==UserScript==
// @id iitc-plugin-console@hansolo669
// @name IITC plugin: console
// @category Debug
// @version 0.0.1
// @namespace https://github.com/hansolo669/iitc-tweaks
// @updateURL https://iitc.reallyawesomedomain.com/console.meta.js
// @downloadURL https://iitc.reallyawesomedomain.com/console.user.js
// @description Utility to pipe the standard console back into IITC and esailly eval snippets
// @include https://www.ingress.com/intel*
// @include http://www.ingress.com/intel*
// @match https://www.ingress.com/intel*
// @match http://www.ingress.com/intel*
// @include https://www.ingress.com/mission/*
// @include http://www.ingress.com/mission/*
// @match https://www.ingress.com/mission/*
// @match http://www.ingress.com/mission/*
// @grant none
// ==/UserScript==
|
[FIX] purchase_stock_picking_return_invoicing: Hide button on proper counter
We should hide the view refunds button when there are no refunds, not
when there aren't invoices.
It hasn't been detected till now, because you usually have an invoice
before doing a refund, but in case you include this purchase order
in a vendor refund, you won't get any vendor bill.
TT32388
|
# Copyright 2019 ForgeFlow S.L. (https://www.forgeflow.com)
# Copyright 2017-2018 Tecnativa - Pedro M. Baeza
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
"name": "Purchase Stock Picking Return Invoicing",
"summary": "Add an option to refund returned pickings",
"version": "14.0.1.1.0",
"category": "Purchases",
"website": "https://github.com/OCA/account-invoicing",
"author": "ForgeFlow, Tecnativa, Odoo Community Association (OCA)",
"license": "AGPL-3",
"installable": True,
"development_status": "Mature",
"depends": ["purchase_stock"],
"data": ["views/account_invoice_view.xml", "views/purchase_view.xml"],
"maintainers": ["pedrobaeza", "MiquelRForgeFlow"],
}
|
# Copyright 2019 ForgeFlow S.L. (https://www.forgeflow.com)
# Copyright 2017-2018 Tecnativa - Pedro M. Baeza
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
"name": "Purchase Stock Picking Return Invoicing",
"summary": "Add an option to refund returned pickings",
"version": "14.0.1.0.0",
"category": "Purchases",
"website": "https://github.com/OCA/account-invoicing",
"author": "ForgeFlow, Tecnativa, Odoo Community Association (OCA)",
"license": "AGPL-3",
"installable": True,
"development_status": "Mature",
"depends": ["purchase_stock"],
"data": ["views/account_invoice_view.xml", "views/purchase_view.xml"],
"maintainers": ["pedrobaeza", "MiquelRForgeFlow"],
}
|
Replace level 1,2,3 with ERROR, WARNING, NOTICE.
|
var HTMLCS_RUNNER = new function() {
this.run = function(standard) {
var self = this;
// At the moment, it passes the whole DOM document.
HTMLCS.process(standard, document, function() {
var messages = HTMLCS.getMessages();
var length = messages.length;
for (var i = 0; i < length; i++) {
self.output(messages[i]);
}
console.log('done');
});
};
this.output = function(msg) {
// Simple output for now.
var typeName = 'UNKNOWN';
switch (msg.type) {
case HTMLCS.ERROR:
typeName = 'ERROR';
break;
case HTMLCS.WARNING:
typeName = 'WARNING';
break;
case HTMLCS.NOTICE:
typeName = 'NOTICE';
break;
}//end switch
console.log(typeName + '|' + msg.code + '|' + msg.msg);
};
};
|
var HTMLCS_RUNNER = new function() {
this.run = function(standard) {
var self = this;
// At the moment, it passes the whole DOM document.
HTMLCS.process(standard, document, function() {
var messages = HTMLCS.getMessages();
var length = messages.length;
for (var i = 0; i < length; i++) {
self.output(messages[i]);
}
console.log('done');
});
};
this.output = function(msg) {
// Simple output for now.
console.log(msg.type + '|' + msg.code + '|' + msg.msg);
};
};
|
Use the plugin() method instead of __call()
|
<?php
namespace SxBootstrap\View\Helper\Bootstrap;
//use SxBootstrap\Exception;
use Zend\View\Helper\AbstractHelper;
/**
* ViewHelper to add twitter bootstrap to the head.
* This WILL use the headScript and headLink helpers.
*/
class Bootstrap extends AbstractHelper
{
public function __invoke()
{
$this->prependCss();
$this->prependJs();
}
protected function prependJs()
{
$scriptHelper = $this->view->plugin('head_script');
$baseHelper = $this->view->plugin('base_path');
$scriptHelper->prependFile($baseHelper('/js/bootstrap.js'));
}
protected function prependCss()
{
$linkHelper = $this->view->plugin('head_link');
$baseHelper = $this->view->plugin('base_path');
$linkHelper->prependStylesheet($baseHelper('/css/bootstrap.css'));
}
}
|
<?php
namespace SxBootstrap\View\Helper\Bootstrap;
//use SxBootstrap\Exception;
use Zend\View\Helper\AbstractHelper;
/**
* ViewHelper to add twitter bootstrap to the head.
* This WILL use the headScript and headLink helpers.
*/
class Bootstrap extends AbstractHelper
{
public function __invoke()
{
$this->prependCss();
$this->prependJs();
}
protected function prependJs()
{
$this->getView()->headScript()->prependFile(
$this->getView()->basePath() . '/js/bootstrap.js'
);
}
protected function prependCss()
{
$this->getView()->headLink()->prependStylesheet(
$this->getView()->basePath() . '/css/bootstrap.css'
);
}
}
|
Fix indentation, remove unused import, avoid deprecated method
|
package com.rarchives.ripme.tst.ripper.rippers;
import java.io.IOException;
import java.net.URL;
import com.rarchives.ripme.ripper.rippers.ErotivRipper;
import org.junit.jupiter.api.Test;
public class ErotivRipperTest extends RippersTest {
@Test
public void testGetGID() throws IOException {
URL url = new URL("https://erotiv.io/e/1568314255");
ErotivRipper ripper = new ErotivRipper(url);
assert("1568314255".equals(ripper.getGID(url)));
}
public void testRip() throws IOException {
URL url = new URL("https://erotiv.io/e/1568314255");
ErotivRipper ripper = new ErotivRipper(url);
testRipper(ripper);
}
@Test
public void testGetURLsFromPage() throws IOException {
URL url = new URL("https://erotiv.io/e/1568314255");
ErotivRipper ripper = new ErotivRipper(url);
assert(1 == ripper.getURLsFromPage(ripper.getFirstPage()).size());
}
}
|
package com.rarchives.ripme.tst.ripper.rippers;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import com.rarchives.ripme.ripper.rippers.ErotivRipper;
import org.junit.jupiter.api.Test;
public class ErotivRipperTest extends RippersTest {
@Test
public void testGetGID() throws IOException {
URL url = new URL("https://erotiv.io/e/1568314255");
ErotivRipper ripper = new ErotivRipper(url);
assertEquals("1568314255", ripper.getGID(url));
}
public void testRip() throws IOException {
URL url = new URL("https://erotiv.io/e/1568314255");
ErotivRipper ripper = new ErotivRipper(url);
testRipper(ripper);
}
@Test
public void testGetURLsFromPage() throws IOException {
URL url = new URL("https://erotiv.io/e/1568314255");
ErotivRipper ripper = new ErotivRipper(url);
assert (1 == ripper.getURLsFromPage(ripper.getFirstPage()).size());
}
}
|
Update newListing no longer require furnishing detail
|
from django import forms
from .models import Listing, HousingUser
class ListingForm(forms.ModelForm):
furnished_details = forms.CharField(required=False, widget=forms.Textarea(attrs={
"placeholder": "Example: bed and desk only"}))
additional_lease_terms = forms.CharField(required=False,
widget=forms.Textarea(attrs={'class': 'form-control',
"placeholder": "Example: Owner pays for trash, sewer, and water. Owner "
"shovel snow, lawn, garden, driveway maintenance."}))
lease_duration_custom = forms.CharField(max_length=128, required=False,
widget=forms.TextInput(attrs={"placeholder": "Example: 6/9 - 8/9/2016"}))
date_available = forms.DateField(widget=forms.TextInput(attrs={"type": "date"}))
class Meta:
model = Listing
exclude = ("listing_owner", "is_active", "datetime_modified",
"datetime_created")
class HousingUserCreationForm(forms.ModelForm):
class Meta:
model = HousingUser
fields = ("username", "password")
|
from django import forms
from .models import Listing, HousingUser
class ListingForm(forms.ModelForm):
furnished_details = forms.CharField(widget=forms.Textarea(attrs={
"placeholder": "Example: bed and desk only"}))
additional_lease_terms = forms.CharField(required=False,
widget=forms.Textarea(attrs={'class': 'form-control',
"placeholder": "Example: Owner pays for trash, sewer, and water. Owner "
"shovel snow, lawn, garden, driveway maintenance."}))
lease_duration_custom = forms.CharField(max_length=128, required=False,
widget=forms.TextInput(attrs={"placeholder": "Example: 6/9 - 8/9/2016"}))
date_available = forms.DateField(widget=forms.TextInput(attrs={"type": "date"}))
class Meta:
model = Listing
exclude = ("listing_owner", "is_active", "datetime_modified",
"datetime_created")
class HousingUserCreationForm(forms.ModelForm):
class Meta:
model = HousingUser
fields = ("username", "password")
|
Remove Home from list of popular pages
|
from collections import OrderedDict
from django.db.models import Count
from mpages.models import Page, PageRead
def menus(request):
main_menu = Page.objects.get(slug="main-menu")
recent_updates = Page.objects.all().order_by("-updated")[:10]
updates = OrderedDict()
for page in recent_updates:
update_date = page.updated.strftime("%Y-%m-%d")
if update_date not in updates:
updates[update_date] = {"date": update_date, "pages": []}
updates[update_date]["pages"].append(page)
page_reads = PageRead.objects.all()[:50]
recent = []
for page_read in page_reads:
if page_read.page not in recent:
recent.append(page_read.page)
if len(recent) == 10:
break
popular = PageRead.objects.all().values("page__slug", "page__title").annotate(total=Count("page__slug")).order_by("-total", "page__slug")[:11]
return {
"mainmenu": main_menu.content_as_html,
"sidebar": {
"updates": list(updates.values()),
"recent": recent,
"popular": popular[1:11],
},
"request": request,
}
|
from collections import OrderedDict
from django.db.models import Count
from mpages.models import Page, PageRead
def menus(request):
main_menu = Page.objects.get(slug="main-menu")
recent_updates = Page.objects.all().order_by("-updated")[:10]
updates = OrderedDict()
for page in recent_updates:
update_date = page.updated.strftime("%Y-%m-%d")
if update_date not in updates:
updates[update_date] = {"date": update_date, "pages": []}
updates[update_date]["pages"].append(page)
page_reads = PageRead.objects.all()[:50]
recent = []
for page_read in page_reads:
if page_read.page not in recent:
recent.append(page_read.page)
if len(recent) == 10:
break
popular = PageRead.objects.all().values("page__slug", "page__title").annotate(total=Count("page__slug")).order_by("-total", "page__slug")[:10]
return {
"mainmenu": main_menu.content_as_html,
"sidebar": {
"updates": list(updates.values()),
"recent": recent,
"popular": popular,
},
"request": request,
}
|
Remove setting response status code on permission validation
|
const Factories = use('core/factories');
const Errors = use('core/errors');
const PermissionsFactory = Factories('Permissions');
function checkPermissions(body) {
if (!this.request.handler.permission) {
return body;
}
if (!this.request.session) {
throw new Errors.Unauthorized("Session is invalid");
}
return PermissionsFactory.getByMember(this.request.session.member)
.then(permissions => {
this.request.session.permissions = permissions.map(permission => permission.title.toLowerCase());
if (!this.request.session.permissions.includes(this.request.handler.permission.toLowerCase())) {
throw new Errors.Forbidden("You don't have permission to perform this action");
}
if (this.request.session.permissions.includes(this.request.handler.annotations["rootownerpermissions"])) {
this.request.session.rootOwner = true;
}
return body;
});
}
module.exports = checkPermissions;
|
const Factories = use('core/factories');
const Errors = use('core/errors');
const PermissionsFactory = Factories('Permissions');
function checkPermissions(body) {
if (!this.request.handler.permission) {
return body;
}
if (!this.request.session) {
this.response.status = this.response.statuses._401_Unauthorized;
throw new Errors.Unauthorized("Session is invalid");
}
return PermissionsFactory.getByMember(this.request.session.member)
.then(permissions => {
this.request.session.permissions = permissions.map(permission => permission.title.toLowerCase());
if (!this.request.session.permissions.includes(this.request.handler.permission.toLowerCase())) {
this.response.status = this.response.statuses._403_Forbidden;
throw new Errors.Forbidden("You don't have permission to perform this action");
}
if (this.request.session.permissions.includes(this.request.handler.annotations["rootownerpermissions"])) {
this.request.session.rootOwner = true;
}
return body;
});
}
module.exports = checkPermissions;
|
Use an OrderedDict to ensure the first option is the default option
|
#!/usr/bin/env python
# encoding: utf-8
#
# This file is part of ckanext-nhm
# Created by the Natural History Museum in London, UK
from collections import OrderedDict
# the order here matters as the default option should always be first in the dict so that it is
# automatically selected in combo boxes that use this list as a source for options
COLLECTION_CONTACTS = OrderedDict([
(u'Data Portal / Other', u'data@nhm.ac.uk'),
(u'Algae, Fungi & Plants', u'm.carine@nhm.ac.uk'),
(u'Economic & Environmental Earth Sciences', u'g.miller@nhm.ac.uk'),
(u'Fossil Invertebrates & Plants', u'z.hughes@nhm.ac.uk@nhm.ac.uk'),
(u'Fossil Vertebrates & Anthropology', u'm.richter@nhm.ac.uk'),
(u'Insects', u'g.broad@nhm.ac.uk'),
(u'Invertebrates', u'm.lowe@nhm.ac.uk'),
(u'Library & Archives', u'library@nhm.ac.uk'),
(u'Mineral & Planetary Sciences', u'm.rumsey@nhm.ac.uk'),
(u'Vertebrates', u'simon.loader@nhm.ac.uk'),
])
|
#!/usr/bin/env python
# encoding: utf-8
#
# This file is part of ckanext-nhm
# Created by the Natural History Museum in London, UK
COLLECTION_CONTACTS = {
u'Data Portal / Other': u'data@nhm.ac.uk',
u'Algae, Fungi & Plants': u'm.carine@nhm.ac.uk',
u'Economic & Environmental Earth Sciences': u'g.miller@nhm.ac.uk',
u'Fossil Invertebrates & Plants': u'z.hughes@nhm.ac.uk@nhm.ac.uk',
u'Fossil Vertebrates & Anthropology': u'm.richter@nhm.ac.uk',
u'Insects': u'g.broad@nhm.ac.uk',
u'Invertebrates': u'm.lowe@nhm.ac.uk',
u'Library & Archives': u'library@nhm.ac.uk',
u'Mineral & Planetary Sciences': u'm.rumsey@nhm.ac.uk',
u'Vertebrates': u'simon.loader@nhm.ac.uk',
}
|
chore(pins): Update pin to new dictionary hotfix pin
- Update pin to new dictionary hotfix pin
|
from setuptools import setup, find_packages
setup(
name='gdcdatamodel',
packages=find_packages(),
install_requires=[
'pytz==2016.4',
'graphviz==0.4.2',
'jsonschema==2.5.1',
'python-dateutil==2.4.2',
'dictionaryutils',
'gdcdictionary',
'psqlgraph',
'cdisutils',
],
package_data={
"gdcdatamodel": [
"xml_mappings/*.yaml",
]
},
dependency_links=[
'git+https://github.com/uc-cdis/dictionaryutils.git@2.0.4#egg=dictionaryutils',
'git+https://github.com/NCI-GDC/cdisutils.git@863ce13772116b51bcf5ce7e556f5df3cb9e6f63#egg=cdisutils',
'git+https://github.com/NCI-GDC/psqlgraph.git@1.2.0#egg=psqlgraph',
'git+https://github.com/NCI-GDC/gdcdictionary.git@1.16.1#egg=gdcdictionary',
],
entry_points={
'console_scripts': [
'gdc_postgres_admin=gdcdatamodel.gdc_postgres_admin:main'
]
},
)
|
from setuptools import setup, find_packages
setup(
name='gdcdatamodel',
packages=find_packages(),
install_requires=[
'pytz==2016.4',
'graphviz==0.4.2',
'jsonschema==2.5.1',
'python-dateutil==2.4.2',
'dictionaryutils',
'gdcdictionary',
'psqlgraph',
'cdisutils',
],
package_data={
"gdcdatamodel": [
"xml_mappings/*.yaml",
]
},
dependency_links=[
'git+https://github.com/uc-cdis/dictionaryutils.git@2.0.4#egg=dictionaryutils',
'git+https://github.com/NCI-GDC/cdisutils.git@863ce13772116b51bcf5ce7e556f5df3cb9e6f63#egg=cdisutils',
'git+https://github.com/NCI-GDC/psqlgraph.git@1.2.0#egg=psqlgraph',
'git+https://github.com/NCI-GDC/gdcdictionary.git@1.16.0#egg=gdcdictionary',
],
entry_points={
'console_scripts': [
'gdc_postgres_admin=gdcdatamodel.gdc_postgres_admin:main'
]
},
)
|
Disable debug output of sqla
|
from sqlalchemy import create_engine
from tornado.web import Application, url
from buildbox.config import settings
from buildbox.db.backend import Backend
from buildbox.web.frontend.build_list import BuildListHandler
from buildbox.web.frontend.build_details import BuildDetailsHandler
application = Application(
[
url(r"/", BuildListHandler,
name='build-list'),
url(r"/projects/([^/]+)/build/([^/]+)/", BuildDetailsHandler,
name='build-details'),
],
static_path=settings['static_path'],
template_path=settings['template_path'],
debug=settings['debug'],
sqla_engine=create_engine(
settings['database'],
# pool_size=options.mysql_poolsize,
# pool_recycle=3600,
# echo=settings['debug'],
# echo_pool=settings['debug'],
),
)
db = Backend.instance()
|
from sqlalchemy import create_engine
from tornado.web import Application, url
from buildbox.config import settings
from buildbox.db.backend import Backend
from buildbox.web.frontend.build_list import BuildListHandler
from buildbox.web.frontend.build_details import BuildDetailsHandler
application = Application(
[
url(r"/", BuildListHandler,
name='build-list'),
url(r"/projects/([^/]+)/build/([^/]+)/", BuildDetailsHandler,
name='build-details'),
],
static_path=settings['static_path'],
template_path=settings['template_path'],
debug=settings['debug'],
sqla_engine=create_engine(
settings['database'],
# pool_size=options.mysql_poolsize,
# pool_recycle=3600,
echo=settings['debug'],
echo_pool=settings['debug'],
),
)
db = Backend.instance()
|
Write CSS to file in CACHE dir
|
<?php
@ini_set('display_errors', 'off');
define('DOCROOT', rtrim(realpath(dirname(__FILE__) . '/../../../'), '/'));
define('DOMAIN', rtrim(rtrim($_SERVER['HTTP_HOST'], '/') . str_replace('/extensions/less_compiler/lib', NULL, dirname($_SERVER['PHP_SELF'])), '/'));
// Include some parts of the engine
require_once(DOCROOT . '/symphony/lib/boot/bundle.php');
require_once(CONFIG);
require_once('dist/lessc.inc.php');
function processParams($string){
$param = (object)array(
'file' => 0
);
if(preg_match_all('/^(.+)$/i', $string, $matches, PREG_SET_ORDER)){
$param->file = $matches[0][1];
}
return $param;
}
$param = processParams($_GET['param']);
header('Content-type: text/css');
$lc = new lessc(WORKSPACE . '/' . $param->file);
$css = $lc->parse();
$filename = pathinfo($param->file);
$filename = $filename['filename'];
file_put_contents(CACHE . '/less_compiler/' . $filename . '.css', $css);
echo $css;
exit;
|
<?php
@ini_set('display_errors', 'off');
define('DOCROOT', rtrim(realpath(dirname(__FILE__) . '/../../../'), '/'));
define('DOMAIN', rtrim(rtrim($_SERVER['HTTP_HOST'], '/') . str_replace('/extensions/less_compiler/lib', NULL, dirname($_SERVER['PHP_SELF'])), '/'));
// Include some parts of the engine
require_once(DOCROOT . '/symphony/lib/boot/bundle.php');
require_once(CONFIG);
require_once('dist/lessc.inc.php');
function processParams($string){
$param = (object)array(
'file' => 0
);
if(preg_match_all('/^(.+)$/i', $string, $matches, PREG_SET_ORDER)){
$param->file = $matches[0][1];
}
return $param;
}
$param = processParams($_GET['param']);
header('Content-type: text/css');
$lc = new lessc(WORKSPACE . '/' . $param->file);
echo $lc->parse();
exit;
|
Change Buffalo spawn egg colors
|
package totemic_commons.pokefenn.entity;
import cpw.mods.fml.common.registry.EntityRegistry;
import totemic_commons.pokefenn.Totemic;
import totemic_commons.pokefenn.entity.animal.EntityBuffalo;
import totemic_commons.pokefenn.lib.Strings;
/**
* Created by Pokefenn.
* Licensed under MIT (If this is one of my Mods)
*/
public final class ModEntities
{
public static void init()
{
EntityRegistry.registerGlobalEntityID(EntityBuffalo.class, Strings.BUFFALO_NAME, EntityRegistry.findGlobalUniqueEntityId(), 0x2a1c12, 0x885f3e);
EntityRegistry.registerModEntity(EntityBuffalo.class, Strings.BUFFALO_NAME, 0, Totemic.instance, 80, 5, true);
//EntityRegistry.addSpawn(EntityBuffalo.class, 1000, 2, 4, EnumCreatureType.creature); //No biomes to spawn in
}
}
|
package totemic_commons.pokefenn.entity;
import cpw.mods.fml.common.registry.EntityRegistry;
import totemic_commons.pokefenn.Totemic;
import totemic_commons.pokefenn.entity.animal.EntityBuffalo;
import totemic_commons.pokefenn.lib.Strings;
/**
* Created by Pokefenn.
* Licensed under MIT (If this is one of my Mods)
*/
public final class ModEntities
{
public static void init()
{
EntityRegistry.registerGlobalEntityID(EntityBuffalo.class, Strings.BUFFALO_NAME, EntityRegistry.findGlobalUniqueEntityId(), 0x001330, 0x323122);
EntityRegistry.registerModEntity(EntityBuffalo.class, Strings.BUFFALO_NAME, 0, Totemic.instance, 80, 5, true);
//EntityRegistry.addSpawn(EntityBuffalo.class, 1000, 2, 4, EnumCreatureType.creature); //No biomes to spawn in
}
}
|
Test commit, magnus tok ikke feil
|
package models;
import java.util.ArrayList;
import java.util.List;
import javax.persistence.CascadeType;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.ManyToMany;
import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;
import javax.persistence.OneToOne;
import com.fasterxml.jackson.annotation.JsonIgnore;
import play.db.ebean.Model;
import play.db.ebean.Model.Finder;
@Entity
public class Guest extends Model {
@Id
public Long id;
@OneToMany(cascade = CascadeType.ALL, mappedBy = "guest")
@JsonIgnore
public List<Booking> booking =new ArrayList<Booking>();
/** TODO REMOVE TEST **/
public Guest(long id) {
this.id = id;
}
/** END TEST **/
public static Finder<Long, Guest> find = new Finder<Long, Guest>(Long.class, Guest.class);
}
|
package models;
import java.util.ArrayList;
import java.util.List;
import javax.persistence.CascadeType;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.ManyToMany;
import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;
import javax.persistence.OneToOne;
import com.fasterxml.jackson.annotation.JsonIgnore;
import play.db.ebean.Model;
import play.db.ebean.Model.Finder;
//tar magnus feil?
@Entity
public class Guest extends Model {
@Id
public Long id;
@OneToMany(cascade = CascadeType.ALL, mappedBy = "guest")
@JsonIgnore
public List<Booking> booking =new ArrayList<Booking>();
/** TODO REMOVE TEST **/
public Guest(long id) {
this.id = id;
}
/** END TEST **/
public static Finder<Long, Guest> find = new Finder<Long, Guest>(Long.class, Guest.class);
}
|
Revert "fix util.js usage and use utils.Adapter"
This reverts commit 6bfb11f7f626735a63202ec846b3aaf49dbb9898.
|
var srcDir = __dirname + "/../";
module.exports = {
all: {
src: [
srcDir + "*.js",
srcDir + "lib/*.js",
srcDir + "adapter/example/*.js",
srcDir + "tasks/**/*.js",
srcDir + "www/**/*.js",
'!' + srcDir + "www/lib/**/*.js",
'!' + srcDir + 'node_modules/**/*.js',
'!' + srcDir + 'adapter/*/node_modules/**/*.js'
],
options: require('./jscsRules.js')
}
};
|
var srcDir = __dirname + "/../";
module.exports = {
all: {
src: [
srcDir + "*.js",
srcDir + "lib/*.js",
srcDir + "adapter/example/*.js",
srcDir + "tasks/**/*.js",
srcDir + "www/**/*.js",
'!' + srcDir + "www/lib/**/*.js",
'!' + srcDir + 'node_modules/**/*.js',
'!' + srcDir + 'adapter/*/node_modules/**/*.js'
],
options: require('./jscsRules.js')
}
};
|
Fix bug with array_agg_mult() function not actually being created.
|
"""Actions to run at server startup.
"""
from django.db import connection
from django.db import transaction
def run():
"""Call this from manage.py or tests.
"""
_add_custom_mult_agg_function()
def _add_custom_mult_agg_function():
"""Make sure the Postgresql database has a custom function array_agg_mult.
NOTE: Figured out the raw sql query by running psql with -E flag
and then calling \df to list functions. The -E flag causes the internal
raw sql of the commands to be shown.
"""
cursor = connection.cursor()
cursor.execute(
'SELECT p.proname '
'FROM pg_catalog.pg_proc p '
'WHERE p.proname=\'array_agg_mult\''
)
mult_agg_exists = bool(cursor.fetchone())
if not mult_agg_exists:
cursor.execute(
'CREATE AGGREGATE array_agg_mult (anyarray) ('
' SFUNC = array_cat'
' ,STYPE = anyarray'
' ,INITCOND = \'{}\''
');'
)
transaction.commit_unless_managed()
|
"""Actions to run at server startup.
"""
from django.db import connection
def run():
"""Call this from manage.py or tests.
"""
_add_custom_mult_agg_function()
def _add_custom_mult_agg_function():
"""Make sure the Postgresql database has a custom function array_agg_mult.
NOTE: Figured out the raw sql query by running psql with -E flag
and then calling \df to list functions. The -E flag causes the internal
raw sql of the commands to be shown.
"""
cursor = connection.cursor()
cursor.execute(
'SELECT p.proname '
'FROM pg_catalog.pg_proc p '
'WHERE p.proname=\'array_agg_mult\''
)
mult_agg_exists = bool(cursor.fetchone())
if not mult_agg_exists:
cursor.execute(
'CREATE AGGREGATE array_agg_mult (anyarray) ('
' SFUNC = array_cat'
' ,STYPE = anyarray'
' ,INITCOND = \'{}\''
');'
)
|
Add Switch and Redirect for Router
|
import React from 'react';
import ReactDOM from 'react-dom';
import {
BrowserRouter as Router,
Redirect,
Route,
Switch,
} from 'react-router-dom';
import './styles/index.css';
import Background from './components/Background';
import Footer from './components/Footer';
import Header from './components/Header';
import ScrollToTop from './components/ScrollToTop';
import Home from './scenes/Home';
import Store from './scenes/Store';
import registerServiceWorker from './registerServiceWorker';
import './i18n';
if (process.env.NODE_ENV === 'production') {
window.Raven
.config('https://0ddfcefcf922465488c2dde443f9c9d5@sentry.io/230876')
.install();
}
ReactDOM.render(
<Router>
<ScrollToTop>
<Background />
<Header />
<Switch>
<Route exact path="/" component={Home} />
<Route path="/store" component={Store} />
<Redirect from="*" to="/" />
</Switch>
<Footer />
</ScrollToTop>
</Router>,
document.getElementById('root'),
);
registerServiceWorker();
|
import React from 'react';
import ReactDOM from 'react-dom';
import { BrowserRouter, Route } from 'react-router-dom';
import './styles/index.css';
import Background from './components/Background';
import Footer from './components/Footer';
import Header from './components/Header';
import ScrollToTop from './components/ScrollToTop';
import Home from './scenes/Home';
import Store from './scenes/Store';
import registerServiceWorker from './registerServiceWorker';
import './i18n';
if (process.env.NODE_ENV === 'production') {
window.Raven
.config('https://0ddfcefcf922465488c2dde443f9c9d5@sentry.io/230876')
.install();
}
ReactDOM.render(
<BrowserRouter>
<ScrollToTop>
<Background />
<Header />
<Route exact path="/" component={Home} />
<Route path="/store" component={Store} />
<Footer />
</ScrollToTop>
</BrowserRouter>,
document.getElementById('root'),
);
registerServiceWorker();
|
Fix instance reference in helper
|
Template.taggedCourses.onCreated(function(){
// Get reference to template instance
var instance = this;
// Accessing the Iron.controller to invoke getParams method of Iron Router.
var router = Router.current();
// Getting Params of the URL
instance.tag = router.params.tag;
// Subscribe to courses tagged with the current tag
instance.subscribe('taggedCourses', instance.tag);
// Subscribe to course images
instance.subscribe('images');
});
Template.taggedCourses.rendered = function () {
// Get reference to template instance
var instance = this;
// Set the page site title for SEO
Meta.setTitle('Courses tagged "' + instance.tag + '"');
};
Template.taggedCourses.helpers({
'courses': function () {
return Courses.find().fetch();
},
'tag': function () {
// Get reference to template instance
var instance = Template.instance();
// Get tag from instance
var tag = instance.tag;
console.log(tag);
return tag;
}
});
|
Template.taggedCourses.helpers({
'courses': function () {
return Courses.find().fetch();
},
'tag': function () {
// Get reference to template instance
var instance = this;
// Get tag from instance
var tag = instance.tag;
return tag;
}
});
Template.taggedCourses.onCreated(function(){
// Get reference to template instance
var instance = this;
// Accessing the Iron.controller to invoke getParams method of Iron Router.
var router = Router.current();
// Getting Params of the URL
instance.tag = router.params.tag;
// Subscribe to courses tagged with the current tag
instance.subscribe('taggedCourses', instance.tag);
// Subscribe to course images
instance.subscribe('images');
});
Template.taggedCourses.rendered = function () {
// Get reference to template instance
var instance = this;
// Set the page site title for SEO
Meta.setTitle('Courses tagged "' + instance.tag + '"');
};
|
Use static imports for standard test utilities
pr-link: Alluxio/alluxio#8985
change-id: cid-2520d114c17da815ef1c596fb55e0b9a64d7070d
|
/*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
* (the "License"). You may not use this work except in compliance with the License, which is
* available at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio;
import static org.junit.Assert.assertEquals;
import org.junit.Test;
import org.junit.runners.model.Statement;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
/**
* Unit tests for {@link SystemOutRule}.
*/
public class SystemOutRuleTest {
private static final ByteArrayOutputStream OUTPUT = new ByteArrayOutputStream();
private static final PrintStream ORIGINAL_SYSTEM_OUT = System.out;
private Statement mStatement = new Statement() {
@Override
public void evaluate() throws Throwable {
System.out.println("2048");
assertEquals("2048\n", OUTPUT.toString());
OUTPUT.reset();
System.out.println("1234");
assertEquals("1234\n", OUTPUT.toString());
}
};
@Test
public void testSystemOutRule() throws Throwable {
new SystemOutRule(OUTPUT).apply(mStatement, null).evaluate();
assertEquals(System.out, ORIGINAL_SYSTEM_OUT);
}
}
|
/*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
* (the "License"). You may not use this work except in compliance with the License, which is
* available at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runners.model.Statement;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
/**
* Unit tests for {@link SystemOutRule}.
*/
public class SystemOutRuleTest {
private static final ByteArrayOutputStream OUTPUT = new ByteArrayOutputStream();
private static final PrintStream ORIGINAL_SYSTEM_OUT = System.out;
private Statement mStatement = new Statement() {
@Override
public void evaluate() throws Throwable {
System.out.println("2048");
Assert.assertEquals("2048\n", OUTPUT.toString());
OUTPUT.reset();
System.out.println("1234");
Assert.assertEquals("1234\n", OUTPUT.toString());
}
};
@Test
public void testSystemOutRule() throws Throwable {
new SystemOutRule(OUTPUT).apply(mStatement, null).evaluate();
Assert.assertEquals(System.out, ORIGINAL_SYSTEM_OUT);
}
}
|
Sort emissions in `froMEmissions` helper
|
import React from 'react'
import ObservableView from './view'
const isEmission = (obj) => (
typeof obj === 'object' &&
typeof obj.x === 'number' &&
obj.x === obj.x &&
obj.x > 0 &&
obj.d !== undefined &&
obj.d !== null
)
const selectValue = obj => obj.x
function fromEmissions(arr, end) {
if (!Array.isArray(arr) || !arr.every(isEmission)) {
throw new Error([
'Expected each value in `emissions` to be an emission',
'({ x: [number], d: [string] })'
].join('. '))
}
const min = Math.min.apply(null, arr.map(selectValue))
const max = typeof end === 'number' ? end : Math.max.apply(null, arr.map(selectValue))
const range = max - min
const minFactor = min / range
const emissions = arr
.filter(({ x }) => x <= max)
.sort((a, b) => a.x - b.x)
.map(({ x, ...rest }) => ({
...rest,
x: x / range - minFactor
}))
const completion = max / range - minFactor
return props => (
<ObservableView
{...props}
completion={completion}
emissions={emissions}
/>
)
}
export default fromEmissions
|
import React from 'react'
import ObservableView from './view'
const isEmission = (obj) => (
typeof obj === 'object' &&
typeof obj.x === 'number' &&
obj.x === obj.x &&
obj.x > 0 &&
obj.d !== undefined &&
obj.d !== null
)
const selectValue = obj => obj.x
function fromEmissions(arr, end) {
if (!Array.isArray(arr) || !arr.every(isEmission)) {
throw new Error([
'Expected each value in `emissions` to be an emission',
'({ x: [number], d: [string] })'
].join('. '))
}
const min = Math.min.apply(null, arr.map(selectValue))
const max = typeof end === 'number' ? end : Math.max.apply(null, arr.map(selectValue))
const range = max - min
const minFactor = min / range
const emissions = arr
.filter(({ x }) => x <= max)
.map(({ x, ...rest }) => ({
...rest,
x: x / range - minFactor
}))
const completion = max / range - minFactor
return props => (
<ObservableView
{...props}
completion={completion}
emissions={emissions}
/>
)
}
export default fromEmissions
|
Update - removed redundant constants from class
|
<?php
/**
* Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
namespace Amazon\Payment\Domain;
class AmazonAuthorizationStatus extends AbstractAmazonStatus
{
const STATE_OPEN = 'Open';
const STATE_PENDING = 'Pending';
const STATE_DECLINED = 'Declined';
const STATE_CLOSED = 'Closed';
const REASON_INVALID_PAYMENT_METHOD = 'InvalidPaymentMethod';
const REASON_PROCESSING_FAILURE = 'ProcessingFailure';
const REASON_AMAZON_REJECTED = 'AmazonRejected';
const REASON_TRANSACTION_TIMEOUT = 'TransactionTimedOut';
const REASON_MAX_CAPTURES_PROCESSED = 'MaxCapturesProcessed';
const REASON_SELLER_CLOSED = 'SellerClosed';
const REASON_EXPIRED_UNUSED = 'ExpiredUnused';
}
|
<?php
/**
* Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
namespace Amazon\Payment\Domain;
class AmazonAuthorizationStatus extends AbstractAmazonStatus
{
const STATE_OPEN = 'Open';
const STATE_PENDING = 'Pending';
const STATE_DECLINED = 'Declined';
const STATE_CLOSED = 'Closed';
const REASON_INVALID_PAYMENT_METHOD = 'InvalidPaymentMethod';
const REASON_PROCESSING_FAILURE = 'ProcessingFailure';
const REASON_AMAZON_REJECTED = 'AmazonRejected';
const REASON_TRANSACTION_TIMEOUT = 'TransactionTimedOut';
const REASON_MAX_CAPTURES_PROCESSED = 'MaxCapturesProcessed';
const REASON_SELLER_CLOSED = 'SellerClosed';
const REASON_EXPIRED_UNUSED = 'ExpiredUnused';
const CODE_HARD_DECLINE = 4273;
const CODE_SOFT_DECLINE = 7638;
}
|
FIX bug in event creation
|
##############################################################################
#
# Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Nathan Fluckiger <nathan.fluckiger@hotmail.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import models, api
class EventCompassion(models.Model):
_inherit = "crm.event.compassion"
@api.model
def create(self, vals):
event = super().create(vals)
if event.campaign_id:
event.analytic_id.campaign_id = event.campaign_id
event.origin_id.campaign_id = event.campaign_id
return event
@api.multi
def write(self, vals):
res = super().write(vals)
for new_event in self:
if new_event.campaign_id:
if new_event.analytic_id:
new_event.analytic_id.campaign_id = new_event.campaign_id
if new_event.origin_id:
new_event.origin_id.campaign_id = new_event.campaign_id
return res
|
##############################################################################
#
# Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Nathan Fluckiger <nathan.fluckiger@hotmail.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import models, api
class EventCompassion(models.Model):
_inherit = "crm.event.compassion"
@api.model
def create(self, vals):
event = super().create(vals)
if event.campaign_id:
event.analytic_id.campaign_id = event.campaign_id
event.origin_id.campaign_id = event.campaign_id
return event
@api.multi
def write(self, vals):
res = super().write(vals)
for new_event in self:
if new_event.campaign_id:
new_event.analytic_id.campaign_id = new_event.campaign_id
new_event.origin_id.campaign_id = new_event.campaign_id
return res
|
Use port from environment variable if specified
|
package main
import (
"fmt"
"io/ioutil"
"log"
"net/http"
"os"
"sort"
)
func init() {
log.SetOutput(os.Stdout)
}
func handler(w http.ResponseWriter, r *http.Request) {
log.Println("Got", r.Method, "to:", r.URL)
var keys []string
for k := range r.Header {
keys = append(keys, k)
}
sort.Strings(keys)
log.Println("Request Headers:")
for _, k := range keys {
log.Println(" ", k, ":", r.Header[k])
}
log.Println("Body:")
body, err := ioutil.ReadAll(r.Body)
if err != nil {
panic(-1)
}
log.Println(string(body))
log.Println("---End Body---")
fmt.Fprintln(w, "Logged request.")
}
func main() {
http.HandleFunc("/", handler)
port := "6000"
if os.Getenv("PORT") != "" {
port = os.Getenv("PORT")
}
log.Println("Listening on port", port, "...")
http.ListenAndServe(":"+port, nil)
}
|
package main
import (
"fmt"
"io/ioutil"
"log"
"net/http"
"os"
"sort"
)
func init() {
log.SetOutput(os.Stdout)
}
func handler(w http.ResponseWriter, r *http.Request) {
log.Println("Got", r.Method, "to:", r.URL)
var keys []string
for k := range r.Header {
keys = append(keys, k)
}
sort.Strings(keys)
log.Println("Request Headers:")
for _, k := range keys {
log.Println(" ", k, ":", r.Header[k])
}
log.Println("Body:")
body, err := ioutil.ReadAll(r.Body)
if err != nil {
panic(-1)
}
log.Println(string(body))
log.Println("---End Body---")
fmt.Fprintln(w, "Logged request.")
}
func main() {
http.HandleFunc("/", handler)
log.Println("Listening on port 6000...")
http.ListenAndServe(":6000", nil)
}
|
Add array sort for node 0.8
|
var dirGlob = require('../../../lib/fs/dir-glob');
require('chai').should();
describe('lib', function () {
describe('fs', function () {
describe('dir-glob', function () {
var globSync;
beforeEach(function () {
globSync = dirGlob.globSync;
});
it('should find directories', function (next) {
var rootPath = __dirname;
var nodes = 'test-bundles/*';
var mask = rootPath + '/' + nodes;
globSync(mask).sort().should.deep.equal([
rootPath + '/test-bundles/bundle1',
rootPath + '/test-bundles/bundle2'
]);
next();
});
});
});
});
|
var dirGlob = require('../../../lib/fs/dir-glob');
require('chai').should();
describe('lib', function () {
describe('fs', function () {
describe('dir-glob', function () {
var globSync;
beforeEach(function () {
globSync = dirGlob.globSync;
});
it('should find directories', function (next) {
var rootPath = __dirname;
var nodes = 'test-bundles/*';
var mask = rootPath + '/' + nodes;
globSync(mask).should.deep.equal([
rootPath + '/test-bundles/bundle1',
rootPath + '/test-bundles/bundle2'
]);
next();
});
});
});
});
|
Use session middleware before mounting grant
|
var koa = require('koa')
, router = require('koa-router')
, mount = require('koa-mount')
, bodyParser = require('koa-bodyparser')
, koaqs = require('koa-qs')
, session = require('koa-session')
, accesslog = require('koa-accesslog')
var Grant = require('grant-koa')
, grant = new Grant(require('./config.json'))
var app = koa()
app.keys = ['secret','key']
app.use(accesslog())
app.use(session(app))
app.use(mount(grant))
app.use(bodyParser())
app.use(router(app))
koaqs(app)
app.get('/handle_facebook_callback', function *(next) {
console.log(this.query)
this.body = JSON.stringify(this.query, null, 2)
})
app.get('/handle_twitter_callback', function *(next) {
console.log(this.query)
this.body = JSON.stringify(this.query, null, 2)
})
app.listen(3000, function() {
console.log('Koa server listening on port ' + 3000)
})
|
var koa = require('koa')
, router = require('koa-router')
, mount = require('koa-mount')
, bodyParser = require('koa-bodyparser')
, koaqs = require('koa-qs')
, session = require('koa-session')
, accesslog = require('koa-accesslog')
var Grant = require('grant-koa')
, grant = new Grant(require('./config.json'))
var app = koa()
app.keys = ['secret','key']
app.use(accesslog())
app.use(mount(grant))
app.use(bodyParser())
app.use(session(app))
app.use(router(app))
koaqs(app)
app.get('/handle_facebook_callback', function *(next) {
console.log(this.query)
this.body = JSON.stringify(this.query, null, 2)
})
app.get('/handle_twitter_callback', function *(next) {
console.log(this.query)
this.body = JSON.stringify(this.query, null, 2)
})
app.listen(3000, function() {
console.log('Koa server listening on port ' + 3000)
})
|
Refactor server and add simple debug mode
|
var express = require('express');
var app = express();
var server = require('http').createServer(app);
var io = require('socket.io')(server);
var midi = require('midi');
var debug = false;
var httpPort = 80;
var midiPort = 1;
// Midi Stuff
// Set up a new input.
var input = new midi.input();
var output = new midi.output();
// Get the name of a specified input port.
console.log('Opening midi port:', input.getPortName(midiPort));
input.openPort(midiPort);
output.openPort(midiPort);
server.listen(port, function () {
console.log('Server listening at port %d', port);
});
// Routing (for index.html)
app.use(express.static(__dirname + '/public'));
io.on('connection', function (socket) {
// When we recieve a midiout event, push that to the midi port
socket.on('midiout', function (data) {
debugMessage('OUTPUT', data);
output.sendMessage(data);
});
// Emit a socket for input from the piano to clients
input.on('message', function(deltaTime, message) {
debugMessage('INPUT ', message);
socket.emit('midiin', message);
});
});
// Helper func for message debug
function debugMessage() {
if (debug) console.debug(arguments);
}
|
var express = require('express');
var app = express();
var server = require('http').createServer(app);
var io = require('socket.io')(server);
var midi = require('midi');
var port = 80;
// Midi Stuff
// Set up a new input.
var input = new midi.input();
var output = new midi.output();
// Get the name of a specified input port.
console.log('Opening midi port:', input.getPortName(1));
input.openPort(1);
output.openPort(1);
server.listen(port, function () {
console.log('Server listening at port %d', port);
});
// Routing
app.use(express.static(__dirname + '/public'));
io.on('connection', function (socket) {
// when the client emits 'new message', this listens and executes
socket.on('midiout', function (data) {
console.log('Midi Output:', data);
output.sendMessage(data);
});
// Configure a callback.
input.on('message', function(deltaTime, message) {
socket.emit('midiin', message);
});
});
|
Reformat & move comment checking stuff to subclass.
|
<?php
namespace Gothick\AkismetClient;
class ClientResult
{
const PRO_TIP_HEADER = 'X-akismet-pro-tip';
/**
* Raw string we got back from the Akismet API as an answer
* @var string
*/
protected $raw_result;
/**
* Akismet's X-akismet-pro-tip header, which sometimes has
* useful extra information.
* @var unknown
*/
protected $pro_tip;
public function __construct (\GuzzleHttp\Psr7\Response $response)
{
if ($response->getStatusCode() != 200)
{
// Our clients are meant to check first
throw new Exception(
'Response with invalid status code in ' . __METHOD__);
}
$this->raw_result = (string) $response->getBody();
if ($response->hasHeader(self::PRO_TIP_HEADER))
{
$this->pro_tip = $response->getHeader(self::PRO_TIP_HEADER);
}
}
public function hasProTip()
{
return (!empty($this->pro_tip));
}
public function getProTip()
{
return $this->pro_tip;
}
}
|
<?php
namespace Gothick\AkismetClient;
class ClientResult
{
const PRO_TIP_HEADER = 'X-akismet-pro-tip';
private $raw_result;
private $pro_tip;
public function __construct(\GuzzleHttp\Psr7\Response $response)
{
if ($response->getStatusCode() != 200)
{
// Our clients are meant to check first
throw new Exception('Response with invalid status code in ' . __METHOD__);
}
$this->raw_result = (string) $response->getBody();
if ($response->hasHeader(self::PRO_TIP_HEADER))
{
$this->pro_tip = $response->getHeader(self::PRO_TIP_HEADER);
}
}
public function isSpam() {
return $this->raw_result == 'true';
}
public function isBlatantSpam() {
return (isSpam() && $this->pro_tip == 'disacrd');
}
}
|
Add GraphQL schema description for page_size for deleteMany and updateMany
|
'use strict';
const { GraphQLInt, GraphQLString } = require('graphql');
// Pagination arguments
const paginationActionTypes = ['find', 'update', 'delete'];
const fullPaginationActionTypes = ['find'];
const getPaginationArgument = function ({ action: { actionType, multiple }, defaultPageSize, maxPageSize }) {
// Only with operations that return an array and do not provide array of data, i.e. only with findMany, deleteMany and
// updateMany
if (!(paginationActionTypes.includes(actionType) && multiple)) { return; }
const paginationArgs = {
page_size: {
type: GraphQLInt,
description: `Sets pagination size.
Using 0 disables pagination.
Maximum: ${maxPageSize}`,
defaultValue: defaultPageSize,
}
};
// Only with safe operations that return an array, i.e. only with findMany
if (!(fullPaginationActionTypes.includes(actionType) && multiple)) { return paginationArgs; }
return Object.assign(paginationArgs, {
after: {
type: GraphQLString,
description: `Retrieves next pagination batch, using the previous response's last model's 'token'.
Using '' means 'from the beginning'`,
defaultValue: '',
},
before: {
type: GraphQLString,
description: `Retrieves previous pagination batch, using the previous response's first model's 'token'.
Using '' means 'from the end'`,
},
page: {
type: GraphQLInt,
description: `Page number, for pagination.
Starts at 1.
Cannot be used with 'before' or 'after'`,
},
});
};
module.exports = {
getPaginationArgument,
};
|
'use strict';
const { GraphQLInt, GraphQLString } = require('graphql');
// Pagination arguments
const paginationActionTypes = ['find'];
const getPaginationArgument = function ({ action: { actionType, multiple }, defaultPageSize, maxPageSize }) {
// Only with safe operations that return an array, i.e. only with findMany
if (!(paginationActionTypes.includes(actionType) && multiple)) { return; }
return {
page_size: {
type: GraphQLInt,
description: `Sets pagination size.
Using 0 disables pagination.
Maximum: ${maxPageSize}`,
defaultValue: defaultPageSize,
},
after: {
type: GraphQLString,
description: `Retrieves next pagination batch, using the previous response's last model's 'token'.
Using '' means 'from the beginning'`,
defaultValue: '',
},
before: {
type: GraphQLString,
description: `Retrieves previous pagination batch, using the previous response's first model's 'token'.
Using '' means 'from the end'`,
},
page: {
type: GraphQLInt,
description: `Page number, for pagination.
Starts at 1.
Cannot be used with 'before' or 'after'`,
},
};
};
module.exports = {
getPaginationArgument,
};
|
Add externals to stop rollup unresolved dependency warnings
|
import resolve from 'rollup-plugin-node-resolve';
import common from 'rollup-plugin-commonjs';
import json from 'rollup-plugin-json';
import replace from 'rollup-plugin-re';
import typescript from 'rollup-plugin-typescript';
export default {
input: 'lib/index.ts',
output: {
file: 'dist/bundle.js',
format: 'cjs'
},
external: ['https', 'atom', 'dgram', 'fs', 'child_process', 'crypto', 'path', 'os'],
plugins: [
replace({
patterns: [
{
match: /formidable(\/|\\)lib/,
test: 'if (global.GENTLY) require = GENTLY.hijack(require);',
replace: '',
},
{
match: /analytics-node\/index.js/,
test: './package',
replace: './package.json'
}
]
}),
typescript({
typescript: require('typescript')
}),
common({
include: 'node_modules/**'
}),
json(),
resolve(),
]
};
|
import resolve from 'rollup-plugin-node-resolve';
import common from 'rollup-plugin-commonjs';
import json from 'rollup-plugin-json';
import replace from 'rollup-plugin-re';
import typescript from 'rollup-plugin-typescript';
export default {
input: 'lib/index.ts',
output: {
file: 'dist/bundle.js',
format: 'cjs'
},
plugins: [
replace({
patterns: [
{
match: /formidable(\/|\\)lib/,
test: 'if (global.GENTLY) require = GENTLY.hijack(require);',
replace: '',
},
{
match: /analytics-node\/index.js/,
test: './package',
replace: './package.json'
}
]
}),
typescript({
typescript: require('typescript')
}),
common({
include: 'node_modules/**'
}),
json(),
resolve(),
]
};
|
Add options object to props
|
var React = require('react');
var data = require('./data.full.js');
var options = {
rowHeight: 40
};
var ReactDataTable = React.createClass({
render: function() {
var createCell = function createCell (key, value) {
return <td key={key}>{value}</td>;
};
var createRow = function createRow (row) {
return (
<tr key={row._id}>{
Object.keys(row).map( function(key) {
return createCell(key, row[key]);
})
}</tr>
);
};
var createHeaderCell = function createHeaderCell (key) {
return <th key={key}>{key}</th>;
};
var createTable = function createTable (table) {
return (
<table>
<thead>{
Object.keys(table[0]).map(function (key) {
return createHeaderCell(key);
})
}</thead>
<tbody>
{table.map(createRow)}
</tbody>
</table>
);
}
return createTable(this.props.data);
}
});
React.render(<ReactDataTable data={data} options={options} />, document.getElementById('app'));
|
var React = require('react');
var data = require('./data.full.js');
var ReactDataTable = React.createClass({
render: function() {
var createCell = function createCell (key, value) {
return <td key={key}>{value}</td>;
};
var createRow = function createRow (row) {
return (
<tr key={row._id}>{
Object.keys(row).map( function(key) {
return createCell(key, row[key]);
})
}</tr>
);
};
var createHeaderCell = function createHeaderCell (key) {
return <th key={key}>{key}</th>;
};
var createTable = function createTable (table) {
return (
<table>
<thead>{
Object.keys(table[0]).map(function (key) {
return createHeaderCell(key);
})
}</thead>
<tbody>
{table.map(createRow)}
</tbody>
</table>
);
}
return createTable(this.props.data);
}
});
React.render(<ReactDataTable data={data} />, document.getElementById('app'));
|
Switch to start_all_workers in RPC server
This does the same as the logic present but it emits
the registry callback event for resources.PROCESS AFTER_SPAWN
that some plugins may be expecting.
Change-Id: I6f9aeca753a5d3c0052f553a2ac46786ca113e1e
Related-Bug: #1687896
|
#!/usr/bin/env python
# Copyright 2011 VMware, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# If ../neutron/__init__.py exists, add ../ to Python search path, so that
# it will override what happens to be installed in /usr/(local/)lib/python...
from oslo_log import log
from neutron._i18n import _LI
from neutron import manager
from neutron import service
LOG = log.getLogger(__name__)
def eventlet_rpc_server():
LOG.info(_LI("Eventlet based AMQP RPC server starting..."))
try:
manager.init()
rpc_workers_launcher = service.start_all_workers()
except NotImplementedError:
LOG.info(_LI("RPC was already started in parent process by "
"plugin."))
else:
rpc_workers_launcher.wait()
|
#!/usr/bin/env python
# Copyright 2011 VMware, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# If ../neutron/__init__.py exists, add ../ to Python search path, so that
# it will override what happens to be installed in /usr/(local/)lib/python...
from oslo_log import log
from neutron._i18n import _LI
from neutron import manager
from neutron import service
LOG = log.getLogger(__name__)
def eventlet_rpc_server():
LOG.info(_LI("Eventlet based AMQP RPC server starting..."))
try:
manager.init()
workers = service._get_rpc_workers() + service._get_plugins_workers()
rpc_workers_launcher = service._start_workers(workers)
except NotImplementedError:
LOG.info(_LI("RPC was already started in parent process by "
"plugin."))
else:
rpc_workers_launcher.wait()
|
Enable Apollo devtools in production
|
import React from "react";
import ReactDOM from "react-dom";
import { RoutingApp } from "./modules/RoutingApp";
import store from "./store";
import { Provider } from "react-redux";
import { STUY_SPEC_API_URL } from "./constants";
import { ApolloProvider } from "react-apollo";
import { ApolloClient } from "apollo-client";
import { HttpLink } from "apollo-link-http";
import { InMemoryCache } from "apollo-cache-inmemory";
import { objectFilter } from "./utils";
const apolloClient = new ApolloClient({
link: new HttpLink({ uri: `${STUY_SPEC_API_URL}/graphql` }),
cache: new InMemoryCache(),
connectToDevTools: true
});
Object.filter = objectFilter;
const App = () => (
<Provider store={store}>
<ApolloProvider client={apolloClient}>
<RoutingApp />
</ApolloProvider>
</Provider>
);
ReactDOM.render(
<App />,
document.getElementById("app"),
);
|
import React from "react";
import ReactDOM from "react-dom";
import { RoutingApp } from "./modules/RoutingApp";
import store from "./store";
import { Provider } from "react-redux";
import { STUY_SPEC_API_URL } from "./constants";
import { ApolloProvider } from "react-apollo";
import { ApolloClient } from "apollo-client";
import { HttpLink } from "apollo-link-http";
import { InMemoryCache } from "apollo-cache-inmemory";
import { objectFilter } from "./utils";
const apolloClient = new ApolloClient({
link: new HttpLink({ uri: `${STUY_SPEC_API_URL}/graphql` }),
cache: new InMemoryCache(),
});
Object.filter = objectFilter;
const App = () => (
<Provider store={store}>
<ApolloProvider client={apolloClient}>
<RoutingApp />
</ApolloProvider>
</Provider>
);
ReactDOM.render(
<App />,
document.getElementById("app"),
);
|
Update to current master.
`IPython.notebook.notebook_path` now returns the complete path including the notebook name.
|
// convert current notebook to html by calling "ipython nbconvert" and open static html file in new tab
define([
'base/js/namespace',
'jquery',
], function(IPython, $) {
"use strict";
if (IPython.version[0] != 3) {
console.log("This extension requires IPython 3.x")
return
}
var nbconvertPrintView = function () {
var kernel = IPython.notebook.kernel;
var path = IPython.notebook.notebook_path;
var command = 'ip=get_ipython(); import os; os.system(\"ipython nbconvert --profile=%s --to html ' + name + '\" % ip.profile)';
function callback(out_type, out_data) {
var url = '/files/' + path.split('.ipynb')[0] + '.html';
var win=window.open(url, '_blank');
win.focus();
}
kernel.execute(command, { shell: { reply : callback } });
};
IPython.toolbar.add_buttons_group([
{
id : 'doPrintView',
label : 'Create static print view',
icon : 'fa-print',
callback : nbconvertPrintView
}
])
})
|
// convert current notebook to html by calling "ipython nbconvert" and open static html file in new tab
"using strict";
nbconvertPrintView = function(){
var kernel = IPython.notebook.kernel;
var name = IPython.notebook.notebook_name;
var path = IPython.notebook.notebook_path;
if (path.length > 0) { path = path.concat('/'); }
var command = 'ip=get_ipython(); import os; os.system(\"ipython nbconvert --profile=%s --to html ' + name + '\" % ip.profile)';
function callback(out_type, out_data)
{
var url = '/files/' + path + name.split('.ipynb')[0] + '.html';
var win=window.open(url, '_blank');
win.focus();
}
kernel.execute(command, { shell: { reply : callback } });
};
IPython.toolbar.add_buttons_group([
{
id : 'doPrintView',
label : 'Create static print view',
icon : 'fa-print',
callback : nbconvertPrintView
}
]);
|
Add SNS topic for event NewCorsSiteRequestReceived
|
from amazonia.classes.sns import SNS
from troposphere import Ref, Join, cloudwatch
from troposphere.sns import Topic, Subscription
def user_registration_topic(emails):
return topic("UserRegistrationReceived", emails)
def new_cors_site_request_received_topic(emails):
return topic("NewCorsSiteRequestReceived", emails)
def topic(topic_title, emails):
topic = Topic(topic_title,
DisplayName=Join("", [Ref("AWS::StackName"), "-", topic_title]))
topic.Subscription = []
for index, email in enumerate(emails):
topic.Subscription.append(Subscription(
topic_title + "Subscription" + str(index),
Endpoint=email,
Protocol="email"))
return topic
def customise_stack_template(template):
template.add_resource(user_registration_topic([]))
template.add_resource(new_cors_site_request_received_topic([]))
return template
|
from amazonia.classes.sns import SNS
from troposphere import Ref, Join, cloudwatch
from troposphere.sns import Topic, Subscription
def user_registration_topic(emails):
return topic("UserRegistrationReceived", emails)
def topic(topic_title, emails):
topic = Topic(topic_title,
DisplayName=Join("", [Ref("AWS::StackName"), "-", topic_title]))
topic.Subscription = []
for index, email in enumerate(emails):
topic.Subscription.append(Subscription(
topic_title + "Subscription" + str(index),
Endpoint=email,
Protocol="email"))
return topic
def customise_stack_template(template):
template.add_resource(user_registration_topic([]))
return template
|
Fix version number to be higher
|
from setuptools import setup
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name = 'invokust',
version = '0.71',
author = 'Max Williams',
author_email = 'futuresharks@gmail.com',
description = 'A small wrapper for locust to allow running load tests from within Python or on AWS Lambda',
long_description=long_description,
long_description_content_type="text/markdown",
url = 'https://github.com/FutureSharks/invokust',
download_url = 'https://github.com/FutureSharks/invokust/archive/0.71.tar.gz',
license = 'MIT',
scripts = ['invokr.py'],
packages = [
'invokust',
'invokust.aws_lambda',
],
install_requires = [
'locustio==0.13.5',
'boto3',
'pyzmq',
'numpy'
],
keywords = ['testing', 'loadtest', 'lamba', 'locust'],
classifiers = [
'Topic :: Software Development :: Quality Assurance',
'Topic :: Software Development :: Testing',
'Topic :: Software Development :: Testing :: Traffic Generation',
'Programming Language :: Python :: 3.6'
],
)
|
from setuptools import setup
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name = 'invokust',
version = '0.7',
author = 'Max Williams',
author_email = 'futuresharks@gmail.com',
description = 'A small wrapper for locust to allow running load tests from within Python or on AWS Lambda',
long_description=long_description,
long_description_content_type="text/markdown",
url = 'https://github.com/FutureSharks/invokust',
download_url = 'https://github.com/FutureSharks/invokust/archive/0.7.tar.gz',
license = 'MIT',
scripts = ['invokr.py'],
packages = [
'invokust',
'invokust.aws_lambda',
],
install_requires = [
'locustio==0.13.5',
'boto3',
'pyzmq',
'numpy'
],
keywords = ['testing', 'loadtest', 'lamba', 'locust'],
classifiers = [
'Topic :: Software Development :: Quality Assurance',
'Topic :: Software Development :: Testing',
'Topic :: Software Development :: Testing :: Traffic Generation',
'Programming Language :: Python :: 3.6'
],
)
|
Switch from "display:none" to "visibility:hidden".
That way the page layout won't be affected but it will still be hidden.
|
(function () {
"use strict";
var settings, visibility;
if (window !== window.top) {
return;
}
safari.self.tab.dispatchMessage('getSettings');
safari.self.addEventListener('message', function (event) {
if (event.name === 'settings') {
settings = event.message;
if (settings.blacklist.indexOf(window.location.hostname) !== -1) {
visibility = document.documentElement.style.visibility;
document.documentElement.style.visibility = 'hidden';
window.setTimeout(function () {
document.documentElement.style.visibility = visibility;
}, 1000 * (settings.delay - settings.jitter + (Math.random() * 2 * settings.jitter)));
}
}
}, false);
}());
|
(function () {
"use strict";
var settings, display;
if (window !== window.top) {
return;
}
safari.self.tab.dispatchMessage('getSettings');
safari.self.addEventListener('message', function (event) {
if (event.name === 'settings') {
settings = event.message;
if (settings.blacklist.indexOf(window.location.hostname) !== -1) {
display = document.documentElement.style.display;
document.documentElement.style.display = 'none';
window.setTimeout(function () {
document.documentElement.style.display = display;
}, 1000 * (settings.delay - settings.jitter + (Math.random() * 2 * settings.jitter)));
}
}
}, false);
}());
|
Fix path for static files
|
package webserver
import (
"log"
"github.com/gin-gonic/contrib/static"
"github.com/gin-gonic/gin"
cors "github.com/tommy351/gin-cors"
)
func Run(listen string) {
gin.SetMode(gin.DebugMode)
r := gin.New()
r.Use(gin.Recovery())
r.Use(cors.Middleware(cors.Options{
AllowOrigins: []string{"*"},
AllowHeaders: []string{"x-auth-token", "content-type"},
}))
apiEndpoints := r.Group("/api/v1")
{
apiEndpoints.GET("/puzzles", puzzlesByPositionAndDistance)
apiEndpoints.GET("/remaining-time", remainingTime)
apiEndpoints.PUT("/start", startGame)
apiEndpoints.PUT("/abort", abortGame)
apiEndpoints.PUT("/finish", finishGame)
}
r.Use(static.Serve("/", static.LocalFile("frontend/dist", false)))
if error := r.Run(listen); error != nil {
log.Fatal(error)
}
}
|
package webserver
import (
"log"
"github.com/gin-gonic/contrib/static"
"github.com/gin-gonic/gin"
cors "github.com/tommy351/gin-cors"
)
func Run(listen string) {
gin.SetMode(gin.DebugMode)
r := gin.New()
r.Use(gin.Recovery())
r.Use(cors.Middleware(cors.Options{
AllowOrigins: []string{"*"},
AllowHeaders: []string{"x-auth-token", "content-type"},
}))
apiEndpoints := r.Group("/api/v1")
{
apiEndpoints.GET("/puzzles", puzzlesByPositionAndDistance)
apiEndpoints.GET("/remaining-time", remainingTime)
apiEndpoints.PUT("/start", startGame)
apiEndpoints.PUT("/abort", abortGame)
apiEndpoints.PUT("/finish", finishGame)
}
r.Use(static.Serve("/", static.LocalFile("static", false)))
if error := r.Run(listen); error != nil {
log.Fatal(error)
}
}
|
Handle greasemonkey matching differently than ext
|
// ==UserScript==
// @name Bring Stars Back (Netflix)
// @namespace theinternetftw.com
// @include https://www.netflix.com/
// @include https://www.netflix.com/browse
// @include https://www.netflix.com/browse/*
// @include https://www.netflix.com/title/*
// @version 1.0.4
// @grant GM_xmlhttpRequest
// @grant GM_setValue
// @grant GM_getValue
// ==/UserScript==
function loadOptions(onLoad) {
return false; // edit bsbOptions by hand in the greasemonkey script.
}
function remoteGet(url, onLoad) {
GM_xmlhttpRequest({
url: url,
method: 'GET',
onload: function(resp) {
if (resp.status != 200) {
console.log('[bsb] bad status ' + resp.status + ' when loading ' + url);
onLoad(new DOMParser().parseFromString('', 'text/html'));
} else {
onLoad(resp.responseXML || new DOMParser().parseFromString(resp.responseText, 'text/html'));
}
},
});
}
function loadRatingsCache(onLoad) {
var cacheJSON = GM_getValue('bsbRatingsCache');
onLoad(cacheJSON ? JSON.parse(cacheJSON) : {});
}
function saveRatingsCache(cache) {
GM_setValue('bsbRatingsCache', JSON.stringify(cache));
}
|
// ==UserScript==
// @name Bring Stars Back (Netflix)
// @namespace theinternetftw.com
// @include https://www.netflix.com/
// @include https://www.netflix.com/browse/*
// @include https://www.netflix.com/title/*
// @version 1.0.4
// @grant GM_xmlhttpRequest
// @grant GM_setValue
// @grant GM_getValue
// ==/UserScript==
function loadOptions(onLoad) {
return false; // edit bsbOptions by hand in the greasemonkey script.
}
function remoteGet(url, onLoad) {
GM_xmlhttpRequest({
url: url,
method: 'GET',
onload: function(resp) {
if (resp.status != 200) {
console.log('[bsb] bad status ' + resp.status + ' when loading ' + url);
onLoad(new DOMParser().parseFromString('', 'text/html'));
} else {
onLoad(resp.responseXML || new DOMParser().parseFromString(resp.responseText, 'text/html'));
}
},
});
}
function loadRatingsCache(onLoad) {
var cacheJSON = GM_getValue('bsbRatingsCache');
onLoad(cacheJSON ? JSON.parse(cacheJSON) : {});
}
function saveRatingsCache(cache) {
GM_setValue('bsbRatingsCache', JSON.stringify(cache));
}
|
Fix: Add encoding line for Python 3
|
# -*- coding: utf-8 -*-
import sys
import os
import operator
sys.path.insert(1, os.path.abspath('..'))
from wsinfo import Info
cnt = 0
max_cnt = 100
servers = {}
with open("urls.txt", "r") as f:
for url in f.readlines():
url = url[:-1]
try:
w = Info(url)
if w.server != "":
if not w.server in servers:
servers[w.server] = 1
else:
servers[w.server] += 1
print("{:35} {:15} {:3} {:15}".format(
w._url, w.ip, w.http_status_code, w.server))
except Exception as e:
print("{:35} {}".format(url, e))
cnt += 1
if cnt >= max_cnt:
break
print("="*80)
print("Web server ranking:")
rank = sorted(servers.items(), key=operator.itemgetter(1), reverse=True)
for n in range(len(rank)):
print("#{:2} {} ({})".format(n+1, rank[n][0], rank[n][1]))
|
import sys
import os
import operator
sys.path.insert(1, os.path.abspath('..'))
from wsinfo import Info
cnt = 0
max_cnt = 100
servers = {}
with open("urls.txt", "r") as f:
for url in f.readlines():
url = url[:-1]
try:
w = Info(url)
if w.server != "":
if not w.server in servers:
servers[w.server] = 1
else:
servers[w.server] += 1
print("{:35} {:15} {:3} {:15}".format(
w._url, w.ip, w.http_status_code, w.server))
except Exception as e:
print("{:30} {}".format(url, e))
cnt += 1
if cnt >= max_cnt:
break
print("="*80)
print("Web server ranking:")
rank = sorted(servers.items(), key=operator.itemgetter(1), reverse=True)
for n in range(10):
print("#{:2} {} ({})".format(n+1, rank[n][0], rank[n][1]))
|
Change the application name to 'Check Forbidden'
|
'''
cd dropbox/codes/check_forbidden
py -3.4 setup.py py2exe
Libraries used:
import tkinter
import tkinter.filedialog
import csv
import os
import re
from time import sleep
import zipfile
'''
from distutils.core import setup
import py2exe
setup(
console=[{'author': 'Shun Sakurai',
'dest_base': 'Check Forbidden',
'script': 'check_forbidden.py',
'version': '1.4.0',
}],
options={'py2exe': {
'bundle_files': 2,
'compressed': True,
'excludes': ['_hashlib', '_frozen_importlib', 'argparse', '_lzma', '_bz2', '_ssl', 'calendar', 'datetime', 'difflib', 'doctest', 'inspect', 'locale', 'optparse', 'pdb', 'pickle', 'pydoc', 'pyexpat', 'pyreadline'],
}}
)
|
'''
cd dropbox/codes/check_forbidden
py -3.4 setup.py py2exe
Libraries used:
import tkinter
import tkinter.filedialog
import csv
import os
import re
from time import sleep
import zipfile
'''
from distutils.core import setup
import py2exe
setup(
console=[{'author': 'Shun Sakurai',
'script': 'check_forbidden.py',
'version': '1.4.0',
}],
options={'py2exe': {
'bundle_files': 2,
'compressed': True,
'excludes': ['_hashlib', '_frozen_importlib', 'argparse', '_lzma', '_bz2', '_ssl', 'calendar', 'datetime', 'difflib', 'doctest', 'inspect', 'locale', 'optparse', 'pdb', 'pickle', 'pydoc', 'pyexpat', 'pyreadline'],
}}
)
|
Move Author Managers into an abstract base class
Copying of the default manager causes the source model to become poluted.
To supply additional managers without replacing the default manager,
the Django docs recommend inheriting from an abstract base class.
https://docs.djangoproject.com/en/dev/topics/db/models/#proxy-model-managers
|
"""Author model for Zinnia"""
from django.db import models
from django.contrib.auth import get_user_model
from django.utils.encoding import python_2_unicode_compatible
from zinnia.managers import entries_published
from zinnia.managers import EntryRelatedPublishedManager
class AuthorManagers(models.Model):
published = EntryRelatedPublishedManager()
class Meta:
abstract = True
@python_2_unicode_compatible
class Author(get_user_model(), AuthorManagers):
"""
Proxy model around :class:`django.contrib.auth.models.get_user_model`.
"""
def entries_published(self):
"""
Returns author's published entries.
"""
return entries_published(self.entries)
@models.permalink
def get_absolute_url(self):
"""
Builds and returns the author's URL based on his username.
"""
return ('zinnia_author_detail', [self.get_username()])
def __str__(self):
"""
If the user has a full name, use it instead of the username.
"""
return self.get_full_name() or self.get_username()
class Meta:
"""
Author's meta informations.
"""
app_label = 'zinnia'
proxy = True
|
"""Author model for Zinnia"""
from django.db import models
from django.contrib.auth import get_user_model
from django.utils.encoding import python_2_unicode_compatible
from zinnia.managers import entries_published
from zinnia.managers import EntryRelatedPublishedManager
@python_2_unicode_compatible
class Author(get_user_model()):
"""
Proxy model around :class:`django.contrib.auth.models.get_user_model`.
"""
objects = get_user_model()._default_manager
published = EntryRelatedPublishedManager()
def entries_published(self):
"""
Returns author's published entries.
"""
return entries_published(self.entries)
@models.permalink
def get_absolute_url(self):
"""
Builds and returns the author's URL based on his username.
"""
return ('zinnia_author_detail', [self.get_username()])
def __str__(self):
"""
If the user has a full name, use it instead of the username.
"""
return self.get_full_name() or self.get_username()
class Meta:
"""
Author's meta informations.
"""
app_label = 'zinnia'
proxy = True
|
Revert "Fix missing prepending extension"
This reverts commit fa787a65bf64820654b6b403f1a75a8772e6c56a.
|
<?php
declare(strict_types=1);
/*
* This file is part of the Sonata Project package.
*
* (c) Thomas Rabaix <thomas.rabaix@sonata-project.org>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
use PHPUnit\Framework\TestCase;
use Sonata\DoctrineORMAdminBundle\DependencyInjection\SonataDoctrineORMAdminExtension;
use Symfony\Component\DependencyInjection\ContainerBuilder;
class SonataDoctrineORMAdminExtensionTest extends TestCase
{
/**
* @var ContainerBuilder
*/
protected $configuration;
protected function tearDown(): void
{
unset($this->configuration);
}
public function testEntityManagerSetFactory(): void
{
$this->configuration = new ContainerBuilder();
$this->configuration->setParameter('kernel.bundles', ['SimpleThingsEntityAuditBundle' => true]);
$loader = new SonataDoctrineORMAdminExtension();
$loader->load([], $this->configuration);
$definition = $this->configuration->getDefinition('sonata.admin.entity_manager');
$this->assertNotNull($definition->getFactory());
$this->assertNotFalse($this->configuration->getParameter('sonata_doctrine_orm_admin.audit.force'));
}
}
|
<?php
declare(strict_types=1);
/*
* This file is part of the Sonata Project package.
*
* (c) Thomas Rabaix <thomas.rabaix@sonata-project.org>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
use PHPUnit\Framework\TestCase;
use Sonata\AdminBundle\DependencyInjection\SonataAdminExtension;
use Sonata\DoctrineORMAdminBundle\DependencyInjection\SonataDoctrineORMAdminExtension;
use Symfony\Component\DependencyInjection\ContainerBuilder;
class SonataDoctrineORMAdminExtensionTest extends TestCase
{
/**
* @var ContainerBuilder
*/
protected $configuration;
protected function tearDown(): void
{
unset($this->configuration);
}
public function testEntityManagerSetFactory(): void
{
$this->configuration = new ContainerBuilder();
$this->configuration->setParameter('kernel.bundles', ['SimpleThingsEntityAuditBundle' => true]);
$adminBundleExtension = new SonataAdminExtension();
$adminBundleExtension->prepend($this->configuration);
$loader = new SonataDoctrineORMAdminExtension();
$loader->load([], $this->configuration);
$definition = $this->configuration->getDefinition('sonata.admin.entity_manager');
$this->assertNotNull($definition->getFactory());
$this->assertNotFalse($this->configuration->getParameter('sonata_doctrine_orm_admin.audit.force'));
}
}
|
Increase timeout to improve test consistency
|
/* eslint-disable no-console */
const {
ServiceBroker,
Transporters: { AMQP: AmqpTransport }
} = require("../../../..");
const AMQP_URL = process.env.AMQP_URI || "amqp://guest:guest@localhost:5672";
const amqpTransporter = new AmqpTransport(AMQP_URL);
const broker = new ServiceBroker({
nodeID: "slow-nodeID",
logger: console,
transporter: amqpTransporter,
});
broker.createService({
name: "testing",
actions: {
hello: {
params: {
cmd: { type: "string" }
},
handler(ctx) {
console.log("slowWorker responding to", ctx.params.cmd);
return new Promise((resolve) => {
setTimeout(() => {
resolve({ msg: ctx.params.cmd, from: "slowWorker" });
}, 10000);
});
},
},
},
});
broker.start();
setTimeout(() => process.exit(1), 10000);
|
/* eslint-disable no-console */
const {
ServiceBroker,
Transporters: { AMQP: AmqpTransport }
} = require("../../../..");
const AMQP_URL = process.env.AMQP_URI || "amqp://guest:guest@localhost:5672";
const amqpTransporter = new AmqpTransport(AMQP_URL);
const broker = new ServiceBroker({
nodeID: "slow-nodeID",
logger: console,
transporter: amqpTransporter,
});
broker.createService({
name: "testing",
actions: {
hello: {
params: {
cmd: { type: "string" }
},
handler(ctx) {
console.log("slowWorker responding to", ctx.params.cmd);
return new Promise((resolve) => {
setTimeout(() => {
resolve({ msg: ctx.params.cmd, from: "slowWorker" });
}, 4000);
});
},
},
},
});
broker.start();
setTimeout(() => process.exit(1), 10000);
|
Remove explicit OT dep; we get it via basictracer
|
from setuptools import setup, find_packages
setup(
name='lightstep',
version='2.2.0',
description='LightStep Python OpenTracing Implementation',
long_description='',
author='LightStep',
license='',
install_requires=['thrift==0.9.2',
'jsonpickle',
'pytest',
'basictracer>=2.2,<2.3'],
tests_require=['sphinx',
'sphinx-epytext'],
classifiers=[
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
],
keywords=[ 'opentracing', 'lightstep', 'traceguide', 'tracing', 'microservices', 'distributed' ],
packages=find_packages(exclude=['docs*', 'tests*', 'sample*']),
)
|
from setuptools import setup, find_packages
setup(
name='lightstep',
version='2.2.0',
description='LightStep Python OpenTracing Implementation',
long_description='',
author='LightStep',
license='',
install_requires=['thrift==0.9.2',
'jsonpickle',
'pytest',
'basictracer>=2.2,<2.3',
'opentracing>=1.2,<1.3'],
tests_require=['sphinx',
'sphinx-epytext'],
classifiers=[
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
],
keywords=[ 'opentracing', 'lightstep', 'traceguide', 'tracing', 'microservices', 'distributed' ],
packages=find_packages(exclude=['docs*', 'tests*', 'sample*']),
)
|
Fix StringStream to conform to latest pypy
Signed-off-by: Stefan Marr <46f1a0bd5592a2f9244ca321b129902a06b53e03@stefan-marr.de>
|
from rpython.rlib.streamio import Stream, StreamError
class StringStream(Stream):
def __init__(self, string):
self._string = string
self.pos = 0
self.max = len(string) - 1
def write(self, data):
raise StreamError("StringStream is not writable")
def truncate(self, size):
raise StreamError("StringStream is immutable")
def tell(self):
return self.pos
def seek(self, offset, whence):
if whence == 0:
self.pos = max(0, offset)
elif whence == 1:
self.pos = max(0, self.pos + offset)
elif whence == 2:
self.pos = max(0, self.max + offset)
else:
raise StreamError("seek(): whence must be 0, 1 or 2")
def read(self, n):
assert isinstance(n, int)
end = self.pos + n
assert end >= 0
data = self._string[self.pos:end]
self.pos += len(data)
return data
|
from rpython.rlib.streamio import Stream, StreamError
class StringStream(Stream):
def __init__(self, string):
self._string = string
self.pos = 0
self.max = len(string) - 1
def write(self, data):
raise StreamError("StringStream is not writable")
def truncate(self, size):
raise StreamError("StringStream is immutable")
def peek(self):
if self.pos < self.max:
return self._string[self.pos:]
else:
return ''
def tell(self):
return self.pos
def seek(self, offset, whence):
if whence == 0:
self.pos = max(0, offset)
elif whence == 1:
self.pos = max(0, self.pos + offset)
elif whence == 2:
self.pos = max(0, self.max + offset)
else:
raise StreamError("seek(): whence must be 0, 1 or 2")
def read(self, n):
assert isinstance(n, int)
end = self.pos + n
data = self._string[self.pos:end]
self.pos += len(data)
return data
|
Fix bug in migration script
|
"""Added column 'weight' to Banner, Logo and Background.
Revision ID: 587c89cfa8ea
Revises: 2c0bfc379e01
Create Date: 2012-05-11 14:36:15.518757
"""
# downgrade revision identifier, used by Alembic.
revision = '587c89cfa8ea'
down_revision = '2c0bfc379e01'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('files', sa.Column('weight',
sa.Integer(),
nullable=True,
default=0))
connection = op.get_bind()
connection.execute('UPDATE files SET weight=0')
op.alter_column('files',
'weight',
existing_type=sa.Integer,
nullable=False)
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('files', 'weight')
### end Alembic commands ###
|
"""Added column 'weight' to Banner, Logo and Background.
Revision ID: 587c89cfa8ea
Revises: 2c0bfc379e01
Create Date: 2012-05-11 14:36:15.518757
"""
# downgrade revision identifier, used by Alembic.
revision = '587c89cfa8ea'
down_revision = '2c0bfc379e01'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('files', sa.Column('weight', sa.Integer(),
nullable=False, default=0))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('files', 'weight')
### end Alembic commands ###
|
Remove /submit endpoint from UI blueprint for now
|
# -*- coding: utf-8 -*-
# import nacl.exceptions
# import nacl.signing
from flask import Blueprint, current_app, render_template
# from nacl.encoding import URLSafeBase64Encoder
# from nacl.signing import VerifyKey
__all__ = [
'ui',
]
ui = Blueprint('ui', __name__)
@ui.route('/')
def home():
"""Show the user the home view."""
payload = current_app.manager.make_payload()
return render_template('index.html', payload=payload)
# @ui.route('/submit', methods=['POST'])
# def submit():
# """Show the submit view."""
# location_id = int(request.headers['authorization'])
#
# location = current_app.manager.get_location_by_id(location_id)
#
# if not location.activated:
# return abort(403, f'Not activated: {location}')
#
# key = location.pubkey
#
# try:
# verify_key = VerifyKey(key=key, encoder=URLSafeBase64Encoder)
# payload = verify_key.verify(request.data, encoder=URLSafeBase64Encoder)
#
# except nacl.exceptions.BadSignatureError as e:
# return abort(403, f'Bad Signature: {e}')
#
# decoded_payload = json.loads(base64.urlsafe_b64decode(payload).decode('utf-8'))
# current_app.manager.insert_payload(decoded_payload)
#
# return '', 204
|
# -*- coding: utf-8 -*-
import base64
import json
from nacl.encoding import URLSafeBase64Encoder
import nacl.exceptions
import nacl.signing
from flask import Blueprint, abort, current_app, render_template, request
from nacl.signing import VerifyKey
__all__ = [
'ui',
]
ui = Blueprint('ui', __name__)
@ui.route('/')
def home():
"""Show the user the home view."""
payload = current_app.manager.make_payload()
return render_template('index.html', payload=payload)
@ui.route('/submit', methods=['POST'])
def submit():
"""Show the submit view."""
location_id = int(request.headers['authorization'])
location = current_app.manager.get_location_by_id(location_id)
if not location.activated:
return abort(403, f'Not activated: {location}')
key = location.pubkey
try:
verify_key = VerifyKey(key=key, encoder=URLSafeBase64Encoder)
payload = verify_key.verify(request.data, encoder=URLSafeBase64Encoder)
except nacl.exceptions.BadSignatureError as e:
return abort(403, f'Bad Signature: {e}')
decoded_payload = json.loads(base64.urlsafe_b64decode(payload).decode('utf-8'))
current_app.manager.insert_payload(decoded_payload)
return '', 204
|
Add UpAllCommand to Mongrate application.
|
<?php
namespace Mongrate\MongrateBundle;
use Symfony\Component\HttpKernel\Bundle\Bundle;
use Symfony\Component\Console\Application;
class MongrateBundle extends Bundle
{
public function registerCommands(Application $application)
{
$config = $application->getKernel()
->getContainer()
->getParameter('mongrate_bundle');
$application->add(new \Mongrate\MongrateBundle\Command\ToggleMigrationCommand($config));
$application->add(new \Mongrate\MongrateBundle\Command\GenerateMigrationCommand($config));
$application->add(new \Mongrate\MongrateBundle\Command\UpCommand($config));
$application->add(new \Mongrate\MongrateBundle\Command\UpAllCommand($config));
$application->add(new \Mongrate\MongrateBundle\Command\DownCommand($config));
$application->add(new \Mongrate\MongrateBundle\Command\ListCommand($config));
$application->add(new \Mongrate\MongrateBundle\Command\TestCommand($config));
}
}
|
<?php
namespace Mongrate\MongrateBundle;
use Symfony\Component\HttpKernel\Bundle\Bundle;
use Symfony\Component\Console\Application;
class MongrateBundle extends Bundle
{
public function registerCommands(Application $application)
{
$config = $application->getKernel()
->getContainer()
->getParameter('mongrate_bundle');
$application->add(new \Mongrate\MongrateBundle\Command\ToggleMigrationCommand($config));
$application->add(new \Mongrate\MongrateBundle\Command\GenerateMigrationCommand($config));
$application->add(new \Mongrate\MongrateBundle\Command\UpCommand($config));
$application->add(new \Mongrate\MongrateBundle\Command\DownCommand($config));
$application->add(new \Mongrate\MongrateBundle\Command\ListCommand($config));
$application->add(new \Mongrate\MongrateBundle\Command\TestCommand($config));
}
}
|
Remove unnecessary properties from index
|
package io.georocket.index.generic;
import java.util.HashMap;
import java.util.Map;
import io.georocket.index.xml.MetaIndexer;
import io.georocket.storage.ChunkMeta;
import io.georocket.storage.IndexMeta;
/**
* Default implementation of {@link MetaIndexer} that extracts generic
* attributes from chunk metadata and adds it to the index.
* @author Michel Kraemer
*/
public class DefaultMetaIndexer implements MetaIndexer {
private final Map<String, Object> result = new HashMap<>();
@Override
public Map<String, Object> getResult() {
return result;
}
@Override
public void onIndexChunk(String path, ChunkMeta chunkMeta,
IndexMeta indexMeta) {
result.put("path", path);
result.put("chunkMeta", chunkMeta.toJsonObject());
if (indexMeta.getTags() != null) {
result.put("tags", indexMeta.getTags());
}
}
}
|
package io.georocket.index.generic;
import java.util.HashMap;
import java.util.Map;
import io.georocket.index.xml.MetaIndexer;
import io.georocket.storage.ChunkMeta;
import io.georocket.storage.IndexMeta;
/**
* Default implementation of {@link MetaIndexer} that extracts generic
* attributes from chunk metadata and adds it to the index.
* @author Michel Kraemer
*/
public class DefaultMetaIndexer implements MetaIndexer {
private final Map<String, Object> result = new HashMap<>();
@Override
public Map<String, Object> getResult() {
return result;
}
@Override
public void onIndexChunk(String path, ChunkMeta chunkMeta,
IndexMeta indexMeta) {
result.put("path", path);
result.put("correlationId", indexMeta.getCorrelationId());
result.put("filename", indexMeta.getFilename());
result.put("timestamp", indexMeta.getTimestamp());
result.put("chunkMeta", chunkMeta.toJsonObject());
if (indexMeta.getTags() != null) {
result.put("tags", indexMeta.getTags());
}
}
}
|
Convert permission IDs into strings
|
<?php namespace Flarum\Core\Groups;
use Flarum\Core\Model;
use Illuminate\Database\Eloquent\Builder;
/**
* @todo document database columns with @property
*/
class Permission extends Model
{
/**
* {@inheritdoc}
*/
protected $table = 'permissions';
/**
* Define the relationship with the group that this permission is for.
*
* @return \Illuminate\Database\Eloquent\Relations\BelongsTo
*/
public function group()
{
return $this->belongsTo('Flarum\Core\Groups\Group', 'group_id');
}
/**
* Set the keys for a save update query.
*
* @param Builder $query
* @return Builder
*/
protected function setKeysForSaveQuery(Builder $query)
{
$query->where('group_id', $this->group_id)
->where('permission', $this->permission);
return $query;
}
/**
* Get a map of permissions to the group IDs that have them.
*
* @return array[]
*/
public static function map()
{
$permissions = [];
foreach (static::get() as $permission) {
$permissions[$permission->permission][] = (string) $permission->group_id;
}
return $permissions;
}
}
|
<?php namespace Flarum\Core\Groups;
use Flarum\Core\Model;
use Illuminate\Database\Eloquent\Builder;
/**
* @todo document database columns with @property
*/
class Permission extends Model
{
/**
* {@inheritdoc}
*/
protected $table = 'permissions';
/**
* Define the relationship with the group that this permission is for.
*
* @return \Illuminate\Database\Eloquent\Relations\BelongsTo
*/
public function group()
{
return $this->belongsTo('Flarum\Core\Groups\Group', 'group_id');
}
/**
* Set the keys for a save update query.
*
* @param Builder $query
* @return Builder
*/
protected function setKeysForSaveQuery(Builder $query)
{
$query->where('group_id', $this->group_id)
->where('permission', $this->permission);
return $query;
}
/**
* Get a map of permissions to the group IDs that have them.
*
* @return array[]
*/
public static function map()
{
$permissions = [];
foreach (static::get() as $permission) {
$permissions[$permission->permission][] = $permission->group_id;
}
return $permissions;
}
}
|
Apply votes quietly every 5 seconds
|
var countVotes = function () {
"use strict";
// find votes for object
var deltaVotesQuery = {
delta: {
$exists: true
}
};
var deltaVotes = Votes.find(deltaVotesQuery).fetch();
var voteTable = {};
// count how many votes each object has
_.forEach(deltaVotes, function (vote) {
var post = voteTable[vote.obj];
if (typeof post === 'undefined') {
// first vote for this post
voteTable[vote.obj] = vote.delta;
} else {
// not first vote for the post
voteTable[vote.obj] += vote.delta;
}
Votes.update({
_id: vote._id
}, {
$unset : {
delta: ''
}
});
});
var i = 0;
_.forEach(voteTable, function (value, key) {
Posts.update({_id: key}, { $inc: {votes: value}});
i++;
});
if (i > 0) {
var str = 'Applying ' + i + ' vote';
if (i > 1) {
str += 's';
}
console.log(str);
}
};
Meteor.startup(countVotes);
// run every 5 seconds
Meteor.setInterval(countVotes, 5 * 1000);
|
var countVotes = function () {
"use strict";
// find votes for object
var deltaVotesQuery = {
delta: {
$exists: true
}
};
var deltaVotes = Votes.find(deltaVotesQuery).fetch();
var voteTable = {};
// count how many votes each object has
_.forEach(deltaVotes, function (vote) {
var post = voteTable[vote.obj];
if (typeof post === 'undefined') {
// first vote for this post
voteTable[vote.obj] = vote.delta;
} else {
// not first vote for the post
voteTable[vote.obj] += vote.delta;
}
Votes.update({
_id: vote._id
}, {
$unset : {
delta: ''
}
});
});
_.forEach(voteTable, function (value, key) {
Posts.update({_id: key}, { $inc: {votes: value}});
});
console.log('Counting votes:', voteTable);
};
Meteor.startup(countVotes);
// run every 2 minutes
Meteor.setInterval(countVotes, 2*1000);
|
Add a bit more info about rational surfaces
|
import sqlite3
conn = sqlite3.connect('surfaces.db')
c = conn.cursor()
c.execute('''CREATE TABLE bibliography (
kdim INT,
pg INT,
q INT,
K2 INT,
chi INT,
e INT,
h11 INT,
sp INT,
ref TEXT
);
''')
rationalsurfaces = [(-1, 0, 0, 9, 3, 1, 0, 7, '''The projective plane, birational to the Hirzebruch surface $\\Sigma_{0} = \\mathbb{P}^{1} \\times \mathbb{P}^{1}$.''')]
for n in range(2,60):
h11 = n+1
e = n+3
K2 = 12 - e
rationalsurfaces.append((-1, 0, 0, K2, e, h11, n, 7, "The Hirzebruch surface $\\Sigma_{" + str(n) + "}$."))
c.executemany("INSERT INTO bibliography VALUES (?,?,?,?,?,?,?,?,?)", rationalsurfaces)
c.close()
conn.commit()
|
import sqlite3
conn = sqlite3.connect('surfaces.db')
c = conn.cursor()
c.execute('''CREATE TABLE bibliography (
kdim INT,
pg INT,
q INT,
K2 INT,
chi INT,
e INT,
h11 INT,
sp INT,
ref TEXT
);
''')
rationalsurfaces = [(-1, 0, 0, "NULL", "NULL", "NULL", 0, 4, '''The projective plane, birational to the Hirzebruch surface $\\Sigma_{0} = \\mathbb{P}^{1} \\times \mathbb{P}^{1}$.''')]
for n in range(2,60):
rationalsurfaces.append((-1, 0, 0, "NULL", "NULL", "NULL", n, 4, "The Hirzebruch surface $\\Sigma_{" + str(n) + "}$."))
c.executemany("INSERT INTO bibliography VALUES (?,?,?,?,?,?,?,?,?)", rationalsurfaces)
c.close()
conn.commit()
|
Allow to render layout resources as smarty templates
|
<?php
class CM_Response_Resource_Layout extends CM_Response_Resource_Abstract {
protected function _process() {
$content = null;
$mimeType = null;
if ($pathRaw = $this->getRender()->getLayoutPath('resource/' . $this->getRequest()->getPath(), null, true, false)) {
$file = new CM_File($pathRaw);
$content = $file->read();
$mimeType = $file->getMimeType();
} elseif ($pathTpl = $this->getRender()->getLayoutPath('resource/' . $this->getRequest()->getPath() . '.tpl', null, true, false)) {
$content = $this->getRender()->fetchTemplate($pathTpl);
$mimeType = CM_File::getMimeTypeByContent($content);
} else {
throw new CM_Exception_Nonexistent('Invalid filename: `' . $this->getRequest()->getPath() . '`', null,
array('severity' => CM_Exception::WARN));
}
$this->enableCache();
$this->setHeader('Content-Type', $mimeType);
$this->_setContent($content);
}
public static function match(CM_Request_Abstract $request) {
return $request->getPathPart(0) === 'layout';
}
}
|
<?php
class CM_Response_Resource_Layout extends CM_Response_Resource_Abstract {
protected function _process() {
$file = null;
if ($path = $this->getRender()->getLayoutPath('resource/' . $this->getRequest()->getPath(), null, true, false)) {
$file = new CM_File($path);
}
if (!$file) {
throw new CM_Exception_Nonexistent('Invalid filename: `' . $this->getRequest()->getPath() . '`', null, array('severity' => CM_Exception::WARN));
}
$this->enableCache();
$this->setHeader('Content-Type', $file->getMimeType());
$this->_setContent($file->read());
}
public static function match(CM_Request_Abstract $request) {
return $request->getPathPart(0) === 'layout';
}
}
|
Make directional light gizmo invisible
|
import M_Gizmo from './M_Gizmo';
import Arrow from '../../../low_level/primitives/Arrow';
import ClassicMaterial from '../../../low_level/materials/ClassicMaterial';
import M_Mesh from '../meshes/M_Mesh';
import Vector4 from '../../../low_level/math/Vector4';
export default class M_DirectionalLightGizmo extends M_Gizmo {
constructor(glBoostContext, length) {
super(glBoostContext, null, null);
this._init(glBoostContext, length);
this.isVisible = false;
this.baseColor = new Vector4(0.8, 0.8, 0, 1);
}
_init(glBoostContext, length) {
this._material = new ClassicMaterial(this._glBoostContext);
this._mesh = new M_Mesh(glBoostContext,
new Arrow(this._glBoostContext, length, 3),
this._material);
this.addChild(this._mesh);
}
set rotate(rotateVec3) {
this._mesh.rotate = rotateVec3;
}
get rotate() {
return this._mesh.rotate;
}
set baseColor(colorVec) {
this._material.baseColor = colorVec;
}
get baseColor() {
return this._material.baseColor;
}
}
|
import M_Gizmo from './M_Gizmo';
import Arrow from '../../../low_level/primitives/Arrow';
import ClassicMaterial from '../../../low_level/materials/ClassicMaterial';
import M_Mesh from '../meshes/M_Mesh';
import Vector4 from '../../../low_level/math/Vector4';
export default class M_DirectionalLightGizmo extends M_Gizmo {
constructor(glBoostContext, length) {
super(glBoostContext, null, null);
this._init(glBoostContext, length);
// this.isVisible = false;
this.baseColor = new Vector4(0.8, 0.8, 0, 1);
}
_init(glBoostContext, length) {
this._material = new ClassicMaterial(this._glBoostContext);
this._mesh = new M_Mesh(glBoostContext,
new Arrow(this._glBoostContext, length, 3),
this._material);
this.addChild(this._mesh);
}
set rotate(rotateVec3) {
this._mesh.rotate = rotateVec3;
}
get rotate() {
return this._mesh.rotate;
}
set baseColor(colorVec) {
this._material.baseColor = colorVec;
}
get baseColor() {
return this._material.baseColor;
}
}
|
[Bitbay] Use compact JSON instead of replacing whitespaces in String
|
package org.knowm.xchange.bitbay.v3.service;
import java.io.IOException;
import java.util.UUID;
import org.knowm.xchange.Exchange;
import org.knowm.xchange.bitbay.v3.dto.trade.BitbayUserTrades;
import org.knowm.xchange.bitbay.v3.dto.trade.BitbayUserTradesQuery;
import org.knowm.xchange.exceptions.ExchangeException;
import org.knowm.xchange.utils.ObjectMapperHelper;
/** @author walec51 */
public class BitbayTradeServiceRaw extends BitbayBaseService {
BitbayTradeServiceRaw(Exchange exchange) {
super(exchange);
}
public BitbayUserTrades getBitbayTransactions(BitbayUserTradesQuery query)
throws IOException, ExchangeException {
final String jsonQuery = ObjectMapperHelper.toCompactJSON(query);
final BitbayUserTrades response =
bitbayAuthenticated.getTransactionHistory(
apiKey, sign, exchange.getNonceFactory(), UUID.randomUUID(), jsonQuery);
checkError(response);
return response;
}
}
|
package org.knowm.xchange.bitbay.v3.service;
import java.io.IOException;
import java.util.UUID;
import java.util.regex.Pattern;
import org.knowm.xchange.Exchange;
import org.knowm.xchange.bitbay.v3.dto.trade.BitbayUserTrades;
import org.knowm.xchange.bitbay.v3.dto.trade.BitbayUserTradesQuery;
import org.knowm.xchange.exceptions.ExchangeException;
import org.knowm.xchange.utils.ObjectMapperHelper;
/** @author walec51 */
public class BitbayTradeServiceRaw extends BitbayBaseService {
private static final Pattern WHITESPACES = Pattern.compile("\\s\\s");
BitbayTradeServiceRaw(Exchange exchange) {
super(exchange);
}
public BitbayUserTrades getBitbayTransactions(BitbayUserTradesQuery query)
throws IOException, ExchangeException {
String jsonQuery = ObjectMapperHelper.toJSON(query);
jsonQuery = WHITESPACES.matcher(jsonQuery).replaceAll("");
BitbayUserTrades response =
bitbayAuthenticated.getTransactionHistory(
apiKey, sign, exchange.getNonceFactory(), UUID.randomUUID(), jsonQuery);
checkError(response);
return response;
}
}
|
Add tailing / to APT_REPO_BASE_URL
|
#
# Copyright 2012 Cisco Systems, Inc.
#
# Author: Soren Hansen <sorhanse@cisco.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from django.core.management.base import BaseCommand
from repomgmt.models import BuildNode, BuildRecord
class Command(BaseCommand):
args = ''
help = 'Processes the build queue'
def handle(self, **options):
if BuildRecord.pending_build_count() > 0:
bn = BuildNode.start_new()
br = BuildRecord.pick_build(bn)
bn.prepare(br)
bn.build(br)
|
#
# Copyright 2012 Cisco Systems, Inc.
#
# Author: Soren Hansen <sorhanse@cisco.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from django.core.management.base import BaseCommand
from repomgmt.models import BuildNode, BuildRecord
import repomgmt.utils
class Command(BaseCommand):
args = ''
help = 'Processes the build queue'
def handle(self, **options):
if BuildRecord.pending_build_count() > 0:
bn = BuildNode.start_new()
br = BuildRecord.pick_build(bn)
bn.prepare(br)
bn.build(br)
|
Update database close timeout value.
|
import thread
import time
from django.db import close_old_connections
class DatabaseConnectionMaintainer(object):
DB_TIMEOUT_SECONDS = 5*60
def __init__(self):
self.clients = set()
# self.device_to_protocol = {}
self.is_recent_db_change_occurred = False
self.delay_and_execute(self.DB_TIMEOUT_SECONDS, self.close_db_connection_if_needed)
def close_db_connection_if_needed(self):
if not self.is_recent_db_change_occurred:
close_old_connections()
print "db connection closed"
self.is_recent_db_change_occurred = False
self.delay_and_execute(self.DB_TIMEOUT_SECONDS, self.close_db_connection_if_needed)
def refresh_timeout(self):
self.is_recent_db_change_occurred = True
def delay_and_execute(self, timeout, callback):
thread.start_new_thread(self.periodical_task, (timeout, callback))
# noinspection PyMethodMayBeStatic
def periodical_task(self, timeout, callback):
time.sleep(timeout)
callback()
|
import thread
import time
from django.db import close_old_connections
class DatabaseConnectionMaintainer(object):
def __init__(self):
self.clients = set()
# self.device_to_protocol = {}
self.is_recent_db_change_occurred = False
self.delay_and_execute(3600, self.close_db_connection_if_needed)
def close_db_connection_if_needed(self):
if not self.is_recent_db_change_occurred:
close_old_connections()
print "db connection closed"
self.is_recent_db_change_occurred = False
self.delay_and_execute(3600, self.close_db_connection_if_needed)
def refresh_timeout(self):
self.is_recent_db_change_occurred = True
def delay_and_execute(self, timeout, callback):
thread.start_new_thread(self.periodical_task, (timeout, callback))
# noinspection PyMethodMayBeStatic
def periodical_task(self, timeout, callback):
time.sleep(timeout)
callback()
|
Fix typo in the file header.
|
<?php
/**
* Class Google\Site_Kit\Core\Assets\External\GoogleCharts
*
* @package Google\Site_Kit
* @copyright 2020 Google LLC
* @license https://www.apache.org/licenses/LICENSE-2.0 Apache License 2.0
* @link https://sitekit.withgoogle.com
*/
namespace Google\Site_Kit\Core\Assets\External;
use Google\Site_Kit\Context;
use Google\Site_Kit\Core\Assets\Script;
/**
* Class representing the Google Charts script.
*
* @since n.e.x.t
* @access private
* @ignore
*/
class GoogleCharts extends Script {
/**
* Registers the asset.
*
* @since n.e.x.t
*
* @param Context $context Plugin context.
*/
public function register( Context $context ) {
wp_register_script( // phpcs:ignore WordPress.WP.EnqueuedResourceParameters.MissingVersion
$this->handle,
'https://www.gstatic.com/charts/loader.js',
(array) $this->args['dependencies'],
null,
$this->args['in_footer']
);
wp_add_inline_script(
$this->handle,
'google.charts.load( "current", { packages: [ "corechart" ] } );'
);
}
}
|
<?php
/**
* Class Google\Site_Kit\Core\Assets\External\GoogleCharts
*
* @package Google\Site_Kit
* @copyright 2019 Google LLC
* @license https://www.apache.org/licenses/LICENSE-2.0 Apache License 2.0
* @link https://sitekit.withgoogle.com
*/
namespace Google\Site_Kit\Core\Assets\External;
use Google\Site_Kit\Context;
use Google\Site_Kit\Core\Assets\Script;
/**
* Class representing the Google Charts script.
*
* @since n.e.x.t
* @access private
* @ignore
*/
class GoogleCharts extends Script {
/**
* Registers the asset.
*
* @since n.e.x.t
*
* @param Context $context Plugin context.
*/
public function register( Context $context ) {
wp_register_script( // phpcs:ignore WordPress.WP.EnqueuedResourceParameters.MissingVersion
$this->handle,
'https://www.gstatic.com/charts/loader.js',
(array) $this->args['dependencies'],
null,
$this->args['in_footer']
);
wp_add_inline_script(
$this->handle,
'google.charts.load( "current", { packages: [ "corechart" ] } );'
);
}
}
|
Insert main body of code into function
|
import pandas as pd
import numpy as np
import operator
from sys import argv
import os
def extract( file_name ):
with open(file_name) as f:
for i,line in enumerate(f,1):
if "SCN" in line:
return i
def main(lta_name):
os.system('ltahdr -i'+ lta_name + '> lta_file.txt')
dictionary = {}
#lta_file = str(argv[1])
skipped_rows = extract('lta_file.txt')-1
header = pd.read_csv('lta_file.txt',skiprows=skipped_rows,delimiter=r"\s+")
flux = list(set(header["OBJECT"]))
#print flux
header['Nrecs'] = header['Nrecs'].astype(float)
for i in flux :
temp = header.loc[header.OBJECT==i,'Nrecs'].values
temp = np.mean(temp)
dictionary[i]=temp
#print dictionary
source = max(dictionary.iteritems(),key=operator.itemgetter(1))[0]
return source
|
import pandas as pd
import numpy as np
import operator
from sys import argv
import os
def extract( file_name ):
with open(file_name) as f:
for i,line in enumerate(f,1):
if "SCN" in line:
return i
os.system('ltahdr -i'+ argv[1]+ '> lta_file.txt')
dictionary = {}
#lta_file = str(argv[1])
skipped_rows = extract('lta_file.txt')-1
header = pd.read_csv('lta_file.txt',skiprows=skipped_rows,delimiter=r"\s+")
flux = list(set(header["OBJECT"]))
print flux
header['Nrecs'] = header['Nrecs'].astype(float)
for i in flux :
temp = header.loc[header.OBJECT==i,'Nrecs'].values
temp = np.mean(temp)
dictionary[i]=temp
print dictionary
source = max(dictionary.iteritems(),key=operator.itemgetter(1))[0]
print source
|
Add main_image to BlogPost API response.
|
from apps.bluebottle_utils.serializers import SorlImageField, SlugHyperlinkedIdentityField
from django.contrib.auth.models import User
from fluent_contents.rendering import render_placeholder
from rest_framework import serializers
from .models import BlogPost
class BlogPostContentsField(serializers.Field):
def to_native(self, obj):
request = self.context.get('request', None)
contents_html = render_placeholder(request, obj)
return contents_html
class BlogPostAuthorSerializer(serializers.ModelSerializer):
picture = SorlImageField('userprofile.picture', '90x90', crop='center')
class Meta:
model = User
fields = ('first_name', 'last_name', 'picture')
class BlogPostDetailSerializer(serializers.ModelSerializer):
contents = BlogPostContentsField(source='contents')
author = BlogPostAuthorSerializer()
url = SlugHyperlinkedIdentityField(view_name='blogpost-instance')
main_image = SorlImageField('main_image', '300x200', crop='center')
class Meta:
model = BlogPost
exclude = ('id',)
class BlogPostPreviewSerializer(BlogPostDetailSerializer):
class Meta:
model = BlogPost
exclude = ('id',)
|
from apps.bluebottle_utils.serializers import SorlImageField, SlugHyperlinkedIdentityField
from django.contrib.auth.models import User
from fluent_contents.rendering import render_placeholder
from rest_framework import serializers
from .models import BlogPost
class BlogPostContentsField(serializers.Field):
def to_native(self, obj):
request = self.context.get('request', None)
contents_html = render_placeholder(request, obj)
return contents_html
class BlogPostAuthorSerializer(serializers.ModelSerializer):
picture = SorlImageField('userprofile.picture', '90x90', crop='center')
class Meta:
model = User
fields = ('first_name', 'last_name', 'picture')
class BlogPostDetailSerializer(serializers.ModelSerializer):
contents = BlogPostContentsField('contents')
author = BlogPostAuthorSerializer()
url = SlugHyperlinkedIdentityField(view_name='blogpost-instance')
class Meta:
model = BlogPost
exclude = ('id',)
class BlogPostPreviewSerializer(BlogPostDetailSerializer):
class Meta:
model = BlogPost
exclude = ('id',)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.