text
stringlengths 16
4.96k
| positive
stringlengths 321
2.24k
| negative
stringlengths 310
2.21k
|
|---|---|---|
[AC-9452] Fix image field import and migration
|
# Generated by Django 2.2.28 on 2022-04-20 13:05
import sorl.thumbnail.fields
from django.db import (
migrations,
models,
)
class Migration(migrations.Migration):
dependencies = [
('accelerator', '0098_update_startup_update_20220408_0441'),
]
operations = [
migrations.AddField(
model_name='program',
name='hubspot_url',
field=models.URLField(blank=True, null=True),
),
migrations.AddField(
model_name='program',
name='program_image',
field=sorl.thumbnail.fields.ImageField(
null=True,
upload_to='program_images'),
),
]
|
# Generated by Django 2.2.28 on 2022-04-20 13:05
from django.db import (
migrations,
models,
)
class Migration(migrations.Migration):
dependencies = [
('accelerator', '0098_update_startup_update_20220408_0441'),
]
operations = [
migrations.AddField(
model_name='program',
name='hubspot_url',
field=models.URLField(blank=True, null=True),
),
migrations.AddField(
model_name='program',
name='program_image',
field=models.ImageField(null=True, upload_to=''),
),
]
|
Switch from optparase to argparse
|
# -*- coding: utf-8 -*-
import argparse
import logging
import os
import signal
import time
from watchdog.observers import Observer
from pyautotest.observers import Notifier, ChangeHandler
# Configure logging
logging.basicConfig(format='%(asctime)s (%(name)s) [%(levelname)s]: %(message)s',
datefmt='%m-%d-%Y %H:%M:%S',
level=logging.INFO)
logger = logging.getLogger('pyautotest')
def main():
parser = argparse.ArgumentParser(description="Continuously run unit tests when changes detected")
parser.add_argument('-l', '--log-level',
metavar='L',
default='INFO',
dest='loglevel',
action='store',
help='set logger level')
args = parser.parse_args()
# Handle options
logger.setLevel(getattr(logging, args.loglevel.upper(), None))
while True:
event_handler = ChangeHandler()
event_handler.run_tests()
observer = Observer()
observer.schedule(event_handler, os.getcwd(), recursive=True)
# Avoid child zombie processes
signal.signal(signal.SIGCHLD, signal.SIG_IGN)
observer.start()
try:
while True:
time.sleep(1)
except KeyboardInterrupt:
observer.stop()
observer.join()
if __name__ == "__main__":
main()
|
# -*- coding: utf-8 -*-
import logging
import os
import signal
import time
from optparse import OptionParser
from watchdog.observers import Observer
from pyautotest.observers import Notifier, ChangeHandler
# Configure logging
logging.basicConfig(format='%(asctime)s (%(name)s) [%(levelname)s]: %(message)s',
datefmt='%m-%d-%Y %H:%M:%S',
level=logging.INFO)
logger = logging.getLogger('pyautotest')
def main():
parser = OptionParser("usage: %prog [options]")
parser.set_defaults(loglevel="INFO")
parser.add_option("-l", "--log-level", action="store", dest="loglevel")
(options, args) = parser.parse_args()
# Handle options
logger.setLevel(getattr(logging, options.loglevel.upper(), None))
while True:
event_handler = ChangeHandler()
event_handler.run_tests()
observer = Observer()
observer.schedule(event_handler, os.getcwd(), recursive=True)
# Avoid child zombie processes
signal.signal(signal.SIGCHLD, signal.SIG_IGN)
observer.start()
try:
while True:
time.sleep(1)
except KeyboardInterrupt:
observer.stop()
observer.join()
if __name__ == "__main__":
main()
|
Fix absolute reference to logfile location
|
import pathlib
_basedir = pathlib.Path(__file__).parents[1]
SQLALCHEMY_DATABASE_URI = (
'sqlite:///' + str(_basedir.joinpath(pathlib.PurePath('app.db')).resolve())
)
SQLALCHEMY_TRACK_MODIFICATIONS = True
SECRET_KEY = 'INSECURE'
MAIL_SERVER = 'localhost'
MAIL_PORT = '25'
MAIL_DEFAULT_SENDER = 'no-reply@localhost.localdomain'
LOGGING = {
"version": 1,
"disable_existing_loggers": False,
"formatters": {
"verbose": {
"format": '%(asctime)s %(levelname)s: %(message)s '
'[in %(pathname)s:%(lineno)d]'
},
},
"handlers": {
"file": {
"level": "DEBUG",
"formatter": "verbose",
"class": "iis.log.LockingFileHandler",
"filename": "./iis.log"
},
},
"loggers": {
"iis": {
"level": "DEBUG",
"handlers": ["file"]
},
}
}
LOGGER_NAME = "iis"
|
import pathlib
_basedir = pathlib.Path(__file__).parents[1]
SQLALCHEMY_DATABASE_URI = (
'sqlite:///' + str(_basedir.joinpath(pathlib.PurePath('app.db')).resolve())
)
SQLALCHEMY_TRACK_MODIFICATIONS = True
SECRET_KEY = 'INSECURE'
MAIL_SERVER = 'localhost'
MAIL_PORT = '25'
MAIL_DEFAULT_SENDER = 'no-reply@localhost.localdomain'
LOGGING = {
"version": 1,
"disable_existing_loggers": False,
"formatters": {
"verbose": {
"format": '%(asctime)s %(levelname)s: %(message)s '
'[in %(pathname)s:%(lineno)d]'
},
},
"handlers": {
"file": {
"level": "DEBUG",
"formatter": "verbose",
"class": "iis.log.LockingFileHandler",
"filename": "/home/max/Projects/iis/iis.log"
},
},
"loggers": {
"iis": {
"level": "DEBUG",
"handlers": ["file"]
},
}
}
LOGGER_NAME = "iis"
|
Fix issue where StyleSheet.create() is called when it doesn't have to be
|
import { StyleSheet } from 'react-native';
import { flatten } from 'lodash';
export const cachedStyles = {};
export const styleHasher = JSON.stringify; // todo: how bad is this?
// Uses cached or generates a new StyleSheet for a given style prop
const createStyleSheet = (stylesToGenerate) => {
let styles = flatten([stylesToGenerate]); // Need to make sure we're working with a flat array
const styleSheet = {}; // passed to StyleSheet.create later
// Load style from cache or add style to stylesheet
styles.forEach((style, index) => {
if (typeof style !== 'object' || !style) return;
const hash = styleHasher(style);
if (cachedStyles[hash]) {
styles[index] = cachedStyles[hash];
} else {
styleSheet[`${index}`] = style;
}
});
if (Object.keys(styleSheet).length) {
// Generate the new stylesheet
const generatedStyleSheet = StyleSheet.create(styleSheet);
// Process the generated stylesheet
Object.keys(generatedStyleSheet).forEach((key) => {
const index = parseInt(key, 0);
const generatedStyle = generatedStyleSheet[key];
const hash = styleHasher(styles[index]);
// add generated style to cache
cachedStyles[hash] = generatedStyle;
// swap generated style into result list
styles[index] = generatedStyle;
});
}
if (styles.length === 1) styles = styles[0]; // eslint-disable-line
return styles;
};
export default createStyleSheet;
|
import { StyleSheet } from 'react-native';
import { flatten } from 'lodash';
export const cachedStyles = {};
export const styleHasher = JSON.stringify; // todo: how bad is this?
// Uses cached or generates a new StyleSheet for a given style prop
const createStyleSheet = (stylesToGenerate) => {
let styles = flatten([stylesToGenerate]); // Need to make sure we're working with a flat array
const styleSheet = {}; // passed to StyleSheet.create later
// Load style from cache or add style to stylesheet
styles.forEach((style, index) => {
if (typeof style !== 'object' || !style) return;
const hash = styleHasher(style);
if (cachedStyles[hash]) {
styles[index] = cachedStyles[hash];
} else {
styleSheet[`${index}`] = style;
}
});
// Generate the new stylesheet
const generatedStyleSheet = StyleSheet.create(styleSheet);
// Process the generated stylesheet
Object.keys(generatedStyleSheet).forEach((key) => {
const index = parseInt(key, 0);
const generatedStyle = generatedStyleSheet[key];
const hash = styleHasher(styles[index]);
// add generated style to cache
cachedStyles[hash] = generatedStyle;
// swap generated style into result list
styles[index] = generatedStyle;
});
if (styles.length === 1) styles = styles[0]; // eslint-disable-line
return styles;
};
export default createStyleSheet;
|
Fix in test for pathologic grammars.
|
# -*- coding: utf-8 -*-
#######################################################################
# Name: test_pathologic_models
# Purpose: Test for grammar models that could lead to infinite loops are
# handled properly.
# Author: Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# Copyright: (c) 2014 Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# License: MIT License
#######################################################################
from __future__ import unicode_literals
import pytest
from arpeggio import ZeroOrMore, Optional, ParserPython, NoMatch, EOF
def test_optional_inside_zeroormore():
"""
Test optional match inside a zero or more.
Optional should always succeed thus inducing ZeroOrMore
to try the match again.
Arpeggio handle this case.
"""
def grammar(): return ZeroOrMore(Optional('a')), EOF
parser = ParserPython(grammar)
with pytest.raises(NoMatch):
# This could lead to infinite loop
parser.parse('b')
|
# -*- coding: utf-8 -*-
#######################################################################
# Name: test_pathologic_models
# Purpose: Test for grammar models that could lead to infinite loops are
# handled properly.
# Author: Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# Copyright: (c) 2014 Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# License: MIT License
#######################################################################
from __future__ import unicode_literals
import pytest
from arpeggio import ZeroOrMore, Optional, ParserPython, NoMatch
def test_optional_inside_zeroormore():
"""
Test optional match inside a zero or more.
Optional should always succeed thus inducing ZeroOrMore
to try the match again.
Arpeggio handle this using soft failures.
"""
def grammar(): return ZeroOrMore(Optional('a'))
parser = ParserPython(grammar)
with pytest.raises(NoMatch):
# This could lead to infinite loop
parser.parse('b')
|
Effects: Refactor transfer back compat to avoid bad nested return
Download builder replaces return statements while concatenating a
bundle, but fails to deal with this nested return. Since this module
is already deprecated, pulling the return to the top level makes
more sense.
|
/*!
* jQuery UI Effects Transfer @VERSION
* http://jqueryui.com
*
* Copyright jQuery Foundation and other contributors
* Released under the MIT license.
* http://jquery.org/license
*/
//>>label: Transfer Effect
//>>group: Effects
//>>description: Displays a transfer effect from one element to another.
//>>docs: http://api.jqueryui.com/transfer-effect/
//>>demos: http://jqueryui.com/effect/
( function( factory ) {
if ( typeof define === "function" && define.amd ) {
// AMD. Register as an anonymous module.
define( [
"jquery",
"../version",
"../effect"
], factory );
} else {
// Browser globals
factory( jQuery );
}
}( function( $ ) {
var effect;
if ( $.uiBackCompat !== false ) {
effect = $.effects.define( "transfer", function( options, done ) {
$( this ).transfer( options, done );
} );
}
return effect;
} ) );
|
/*!
* jQuery UI Effects Transfer @VERSION
* http://jqueryui.com
*
* Copyright jQuery Foundation and other contributors
* Released under the MIT license.
* http://jquery.org/license
*/
//>>label: Transfer Effect
//>>group: Effects
//>>description: Displays a transfer effect from one element to another.
//>>docs: http://api.jqueryui.com/transfer-effect/
//>>demos: http://jqueryui.com/effect/
( function( factory ) {
if ( typeof define === "function" && define.amd ) {
// AMD. Register as an anonymous module.
define( [
"jquery",
"../version",
"../effect"
], factory );
} else {
// Browser globals
factory( jQuery );
}
}( function( $ ) {
if ( $.uiBackCompat !== false ) {
return $.effects.define( "transfer", function( options, done ) {
$( this ).transfer( options, done );
} );
}
} ) );
|
Annotate jsdoc for FilterBuilder class as private
|
import { cloneDeep } from 'lodash'
import filters from './index'
import { boolMerge } from '../utils'
export default class FilterBuilder {
constructor () {
this._filters = {}
}
/**
* Apply a filter of a given type providing all the necessary arguments,
* passing these arguments directly to the specified filter builder. Merges
* existing filter(s) with the new filter.
*
* @private
*
* @param {String} type Name of the filter type.
* @param {...args} args Arguments passed to filter builder.
* @returns {FilterBuilder} Builder class.
*/
filter(type, ...args) {
this._filter('and', type, ...args)
return this
}
_filter(boolType, filterType, ...args) {
let klass = filters[filterType]
let newFilter
if (!klass) {
throw new TypeError(`Filter type ${filterType} not found.`)
}
newFilter = klass(...args)
this._filters = boolMerge(newFilter, this._filters, boolType)
return this
}
/**
* Alias to FilterBuilder#filter.
*
* @private
*
* @returns {FilterBuilder} Builder class.
*/
andFilter(...args) {
return this._filter('and', ...args)
}
orFilter(type, ...args) {
this._filter('or', type, ...args)
return this
}
notFilter(type, ...args) {
this._filter('not', type, ...args)
return this
}
get filters () {
return cloneDeep(this._filters)
}
}
|
import { cloneDeep } from 'lodash'
import filters from './index'
import { boolMerge } from '../utils'
export default class FilterBuilder {
constructor () {
this._filters = {}
}
/**
* Apply a filter of a given type providing all the necessary arguments,
* passing these arguments directly to the specified filter builder. Merges
* existing filter(s) with the new filter.
*
* @param {String} type Name of the filter type.
* @param {...args} args Arguments passed to filter builder.
* @returns {FilterBuilder} Builder class.
*/
filter(type, ...args) {
this._filter('and', type, ...args)
return this
}
_filter(boolType, filterType, ...args) {
let klass = filters[filterType]
let newFilter
if (!klass) {
throw new TypeError(`Filter type ${filterType} not found.`)
}
newFilter = klass(...args)
this._filters = boolMerge(newFilter, this._filters, boolType)
return this
}
/**
* Alias to FilterBuilder#filter.
*
* @private
*
* @returns {FilterBuilder} Builder class.
*/
andFilter(...args) {
return this._filter('and', ...args)
}
orFilter(type, ...args) {
this._filter('or', type, ...args)
return this
}
notFilter(type, ...args) {
this._filter('not', type, ...args)
return this
}
get filters () {
return cloneDeep(this._filters)
}
}
|
Make onContentsChanged public in the uitemstackhandler
|
package info.u_team.u_team_core.inventory;
import info.u_team.u_team_core.api.item.IExtendedItemHandler;
import net.minecraft.inventory.ItemStackHelper;
import net.minecraft.nbt.CompoundNBT;
import net.minecraftforge.items.ItemStackHandler;
public class UItemStackHandler extends ItemStackHandler implements IExtendedItemHandler {
public UItemStackHandler(int size) {
super(size);
}
@Override
public void setSize(int size) {
throw new UnsupportedOperationException();
}
@Override
public CompoundNBT serializeNBT() {
final CompoundNBT compound = new CompoundNBT();
ItemStackHelper.saveAllItems(compound, stacks, false);
return compound;
}
@Override
public void deserializeNBT(CompoundNBT compound) {
ItemStackHelper.loadAllItems(compound, stacks);
onLoad();
}
@Override
public void onContentsChanged(int slot) {
super.onContentsChanged(slot);
}
}
|
package info.u_team.u_team_core.inventory;
import info.u_team.u_team_core.api.item.IExtendedItemHandler;
import net.minecraft.inventory.ItemStackHelper;
import net.minecraft.nbt.CompoundNBT;
import net.minecraftforge.items.ItemStackHandler;
public class UItemStackHandler extends ItemStackHandler implements IExtendedItemHandler {
public UItemStackHandler(int size) {
super(size);
}
@Override
public void setSize(int size) {
throw new UnsupportedOperationException();
}
@Override
public CompoundNBT serializeNBT() {
final CompoundNBT compound = new CompoundNBT();
ItemStackHelper.saveAllItems(compound, stacks, false);
return compound;
}
@Override
public void deserializeNBT(CompoundNBT compound) {
ItemStackHelper.loadAllItems(compound, stacks);
onLoad();
}
}
|
Fix test broken by previous commit.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# (c) 2015-2018, ETH Zurich, Institut fuer Theoretische Physik
# Author: Dominik Gresch <greschd@gmx.ch>
"""Tests for creating a simple tight-binding model."""
import pytest
from parameters import T_VALUES, KPT
@pytest.mark.parametrize('t1', T_VALUES)
@pytest.mark.parametrize('k', KPT)
def test_simple(t1, get_model, k, compare_data, models_equal, compare_isclose):
"""Regression test for a simple manually created tight-binding model."""
model = get_model(*t1)
compare_isclose(model.hamilton(k), tag='hamilton')
compare_isclose(model.eigenval(k), tag='eigenval')
compare_data(models_equal, model)
def test_invalid_dim(get_model):
"""
Check that an error is raised when the reciprocal lattice vector
does not match the dimension.
"""
model = get_model(0.1, 0.2)
model.add_hop(1j, 0, 1, (0, 1, 2))
with pytest.raises(ValueError):
model.add_hop(1j, 0, 1, (0, 1))
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# (c) 2015-2018, ETH Zurich, Institut fuer Theoretische Physik
# Author: Dominik Gresch <greschd@gmx.ch>
"""Tests for creating a simple tight-binding model."""
import pytest
from parameters import T_VALUES, KPT
@pytest.mark.parametrize('t1', T_VALUES)
@pytest.mark.parametrize('k', KPT)
def test_simple(t1, get_model, k, compare_data, models_equal, compare_isclose):
"""Regression test for a simple manually created tight-binding model."""
model = get_model(*t1)
compare_isclose(model.hamilton(k), tag='hamilton')
compare_isclose(model.eigenval(k), tag='eigenval')
compare_data(models_equal, model)
@pytest.mark.parametrize('t1', T_VALUES)
def test_invalid_dim(t1, get_model):
"""
Check that an error is raised when the dimension does not match
the hopping matrix keys.
"""
with pytest.raises(ValueError):
get_model(*t1, dim=2)
|
Make the order of LINKS output consistent
|
from twisted.plugin import IPlugin
from twisted.words.protocols import irc
from txircd.module_interface import Command, ICommand, IModuleData, ModuleData
from zope.interface import implements
class LinksCommand(ModuleData, Command):
implements(IPlugin, IModuleData, ICommand)
name = "LinksCommand"
core = True
def userCommands(self):
return [ ("LINKS", 1, self) ]
def parseParams(self, user, params, prefix, tags):
return {}
def execute(self, user, data):
user.sendMessage(irc.RPL_LINKS, self.ircd.name, self.ircd.name, "0 {}".format(self.ircd.config["server_description"]))
for server in self.ircd.servers.itervalues():
hopCount = 1
nextServer = server.nextClosest
while nextServer != self.ircd.serverID:
nextServer = self.ircd.servers[nextServer].nextClosest
hopCount += 1
if server.nextClosest == self.ircd.serverID:
nextClosestName = self.ircd.name
else:
nextClosestName = self.ircd.servers[server.nextClosest].name
user.sendMessage(irc.RPL_LINKS, server.name, nextClosestName, "{} {}".format(hopCount, server.description))
user.sendMessage(irc.RPL_ENDOFLINKS, "*", "End of /LINKS list.")
return True
linksCmd = LinksCommand()
|
from twisted.plugin import IPlugin
from twisted.words.protocols import irc
from txircd.module_interface import Command, ICommand, IModuleData, ModuleData
from zope.interface import implements
class LinksCommand(ModuleData, Command):
implements(IPlugin, IModuleData, ICommand)
name = "LinksCommand"
core = True
def userCommands(self):
return [ ("LINKS", 1, self) ]
def parseParams(self, user, params, prefix, tags):
return {}
def execute(self, user, data):
for server in self.ircd.servers.itervalues():
hopCount = 1
nextServer = server.nextClosest
while nextServer != self.ircd.serverID:
nextServer = self.ircd.servers[nextServer].nextClosest
hopCount += 1
if server.nextClosest == self.ircd.serverID:
nextClosestName = self.ircd.name
else:
nextClosestName = self.ircd.servers[server.nextClosest].name
user.sendMessage(irc.RPL_LINKS, server.name, nextClosestName, "{} {}".format(hopCount, server.description))
user.sendMessage(irc.RPL_LINKS, self.ircd.name, self.ircd.name, "0 {}".format(self.ircd.config["server_description"]))
user.sendMessage(irc.RPL_ENDOFLINKS, "*", "End of /LINKS list.")
return True
linksCmd = LinksCommand()
|
Fix broken test in maybeSendVerificationEmail
|
import Users from "meteor/vulcan:users";
import { addCallback } from 'meteor/vulcan:core';
const TRUSTLEVEL1_THRESHOLD = 2000
function updateTrustedStatus ({newDocument, vote}) {
const user = Users.findOne(newDocument.userId)
if (user.karma >= TRUSTLEVEL1_THRESHOLD && (!Users.getGroups(user).includes('trustLevel1'))) {
Users.update(user._id, {$push: {groups: 'trustLevel1'}});
const updatedUser = Users.findOne(newDocument.userId)
//eslint-disable-next-line no-console
console.info("User gained trusted status", updatedUser.username, updatedUser._id, updatedUser.karma, updatedUser.groups)
}
}
addCallback("votes.smallUpvote.async", updateTrustedStatus);
addCallback("votes.bigUpvote.async", updateTrustedStatus);
function maybeSendVerificationEmail (modifier, user)
{
if(modifier.$set.whenConfirmationEmailSent
&& (!user.whenConfirmationEmailSent
|| user.whenConfirmationEmailSent.getTime() !== modifier.$set.whenConfirmationEmailSent.getTime()))
{
Accounts.sendVerificationEmail(user._id);
}
}
addCallback("users.edit.sync", maybeSendVerificationEmail);
|
import Users from "meteor/vulcan:users";
import { addCallback } from 'meteor/vulcan:core';
const TRUSTLEVEL1_THRESHOLD = 2000
function updateTrustedStatus ({newDocument, vote}) {
const user = Users.findOne(newDocument.userId)
if (user.karma >= TRUSTLEVEL1_THRESHOLD && (!Users.getGroups(user).includes('trustLevel1'))) {
Users.update(user._id, {$push: {groups: 'trustLevel1'}});
const updatedUser = Users.findOne(newDocument.userId)
//eslint-disable-next-line no-console
console.info("User gained trusted status", updatedUser.username, updatedUser._id, updatedUser.karma, updatedUser.groups)
}
}
addCallback("votes.smallUpvote.async", updateTrustedStatus);
addCallback("votes.bigUpvote.async", updateTrustedStatus);
function maybeSendVerificationEmail (modifier, user)
{
if(modifier.$set.whenConfirmationEmailSent
&& (!user.whenConfirmationEmailSent
|| user.whenConfirmationEmailSent.getTime() !== modifier.$set.whenConfirmationEmailSent))
{
Accounts.sendVerificationEmail(user._id);
}
}
addCallback("users.edit.sync", maybeSendVerificationEmail);
|
Quit Blender after writing FBX
|
# !/usr/bin/env python
# Blender has moved to Python 3!
import sys
import os
import bpy
for sysarg in sys.argv:
print(sysarg)
py_args = sys.argv[sys.argv.index('--') + 1]
py_args = py_args.split(' ')
for arg in py_args:
if (arg.startswith('basedir:')):
basedir = arg.split('basedir:')[1]
else:
# can supply filename(s) with or without extension
pdb_code = os.path.splitext(arg)[0]
abs_file_in = os.path.join(basedir, 'structures/wrl', pdb_code+'.wrl')
# This is the base directory, used for saving files
molecule = bpy.ops.import_scene.x3d(
filepath = abs_file_in
)
abs_file_out = os.path.join(basedir,'structures/fbx',pdb_code+'.fbx')
bpy.ops.export_scene.fbx(
filepath = abs_file_out
)
bpy.ops.wm.quit_blender()
|
# !/usr/bin/env python
# Blender has moved to Python 3!
import sys
import os
import bpy
for sysarg in sys.argv:
print(sysarg)
py_args = sys.argv[sys.argv.index('--') + 1]
py_args = py_args.split(' ')
for arg in py_args:
if (arg.startswith('basedir:')):
basedir = arg.split('basedir:')[1]
else:
# can supply filename(s) with or without extension
pdb_code = os.path.splitext(arg)[0]
abs_file_in = os.path.join(basedir, 'structures/wrl', pdb_code+'.wrl')
# This is the base directory, used for saving files
molecule = bpy.ops.import_scene.x3d(
filepath = abs_file_in
)
abs_file_out = os.path.join(basedir,'structures/fbx',pdb_code+'.fbx')
bpy.ops.export_scene.fbx(
filepath = abs_file_out
)
|
Add MIDDLEWARE_CLASSES to test settings
Squelches a warning when using Django 1.7.
|
#!/usr/bin/env python
import django
from django.conf import settings
from django.core.management import call_command
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
ALLOWED_HOSTS=[
'testserver',
],
INSTALLED_APPS=[
'django_nose',
'permissions',
'permissions.tests',
],
MIDDLEWARE_CLASSES=[],
ROOT_URLCONF='permissions.tests.urls',
TEST_RUNNER='django_nose.NoseTestSuiteRunner'
)
if django.VERSION[:2] >= (1, 7):
from django import setup
else:
setup = lambda: None
setup()
call_command("test")
|
#!/usr/bin/env python
import django
from django.conf import settings
from django.core.management import call_command
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
ALLOWED_HOSTS=[
'testserver',
],
INSTALLED_APPS=[
'django_nose',
'permissions',
'permissions.tests',
],
ROOT_URLCONF='permissions.tests.urls',
TEST_RUNNER='django_nose.NoseTestSuiteRunner'
)
if django.VERSION[:2] >= (1, 7):
from django import setup
else:
setup = lambda: None
setup()
call_command("test")
|
Update djsonb, and add pytz
|
#!/usr/bin/env python
from setuptools import setup, find_packages
tests_require = []
setup(
name='ashlar',
version='0.0.2',
description='Define and validate schemas for metadata for geotemporal event records',
author='Azavea, Inc.',
author_email='info@azavea.com',
keywords='gis jsonschema',
packages=find_packages(exclude=['tests']),
dependency_links=[
'https://github.com/azavea/djsonb/tarball/develop#egg=djsonb-0.1.5'
],
install_requires=[
'Django ==1.8.6',
'djangorestframework >=3.1.1',
'djangorestframework-gis >=0.8.1',
'django-filter >=0.9.2',
'djsonb >=0.1.5',
'jsonschema >=2.4.0',
'psycopg2 >=2.6',
'django-extensions >=1.5.2',
'python-dateutil >=2.4.2',
'PyYAML >=3.11',
'pytz >= 2015.7'
],
extras_require={
'dev': [],
'test': tests_require
},
test_suite='tests',
tests_require=tests_require,
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages
tests_require = []
setup(
name='ashlar',
version='0.0.2',
description='Define and validate schemas for metadata for geotemporal event records',
author='Azavea, Inc.',
author_email='info@azavea.com',
keywords='gis jsonschema',
packages=find_packages(exclude=['tests']),
dependency_links=[
'https://github.com/azavea/djsonb/tarball/develop#egg=djsonb-0.1.4'
],
install_requires=[
'Django ==1.8.6',
'djangorestframework >=3.1.1',
'djangorestframework-gis >=0.8.1',
'django-filter >=0.9.2',
'djsonb >=0.1.4',
'jsonschema >=2.4.0',
'psycopg2 >=2.6',
'django-extensions >=1.5.2',
'python-dateutil >=2.4.2',
'PyYAML >=3.11'
],
extras_require={
'dev': [],
'test': tests_require
},
test_suite='tests',
tests_require=tests_require,
)
|
Fix the usage for `service rm` command
Signed-off-by: Yi EungJun <d8a05a153c80bfd80b76dd9b85a05eebcec2ca4e@navercorp.com>
|
package service
import (
"fmt"
"strings"
"github.com/docker/docker/api/client"
"github.com/docker/docker/cli"
"github.com/spf13/cobra"
"golang.org/x/net/context"
)
func newRemoveCommand(dockerCli *client.DockerCli) *cobra.Command {
cmd := &cobra.Command{
Use: "rm [OPTIONS] SERVICE [SERVICE...]",
Aliases: []string{"remove"},
Short: "Remove a service",
Args: cli.RequiresMinArgs(1),
RunE: func(cmd *cobra.Command, args []string) error {
return runRemove(dockerCli, args)
},
}
cmd.Flags()
return cmd
}
func runRemove(dockerCli *client.DockerCli, sids []string) error {
client := dockerCli.Client()
ctx := context.Background()
var errs []string
for _, sid := range sids {
err := client.ServiceRemove(ctx, sid)
if err != nil {
errs = append(errs, err.Error())
continue
}
fmt.Fprintf(dockerCli.Out(), "%s\n", sid)
}
if len(errs) > 0 {
return fmt.Errorf(strings.Join(errs, "\n"))
}
return nil
}
|
package service
import (
"fmt"
"strings"
"github.com/docker/docker/api/client"
"github.com/docker/docker/cli"
"github.com/spf13/cobra"
"golang.org/x/net/context"
)
func newRemoveCommand(dockerCli *client.DockerCli) *cobra.Command {
cmd := &cobra.Command{
Use: "rm [OPTIONS] SERVICE",
Aliases: []string{"remove"},
Short: "Remove a service",
Args: cli.RequiresMinArgs(1),
RunE: func(cmd *cobra.Command, args []string) error {
return runRemove(dockerCli, args)
},
}
cmd.Flags()
return cmd
}
func runRemove(dockerCli *client.DockerCli, sids []string) error {
client := dockerCli.Client()
ctx := context.Background()
var errs []string
for _, sid := range sids {
err := client.ServiceRemove(ctx, sid)
if err != nil {
errs = append(errs, err.Error())
continue
}
fmt.Fprintf(dockerCli.Out(), "%s\n", sid)
}
if len(errs) > 0 {
return fmt.Errorf(strings.Join(errs, "\n"))
}
return nil
}
|
Fix issue where modX was not yet available
|
<?php namespace MODX\Shell\Command\User;
use MODX\Shell\Command\ListProcessor;
/**
* List all users for the current modX instance
*/
class GetList extends ListProcessor
{
protected $processor = 'security/user/getlist';
protected $headers = array(
'id', 'username', 'active', 'sudo'
);
protected $name = 'user:list';
protected $description = 'List users';
protected function configure()
{
$this->getMODX();
$version = $this->modx->getVersionData();
if (version_compare($version['full_version'], '2.2.0-pl', '<')) {
$this->headers = array(
'id', 'username', 'active'
);
}
}
/**
* Format the "modUer.active" field as boolean
*
* @param bool $value
*
* @return string
*/
protected function formatActive($value)
{
return $this->renderBoolean($value);
}
/**
* Format the "modUser.sudo" field as boolean
*
* @param bool $value
*
* @return string
*/
protected function formatSudo($value)
{
return $this->renderBoolean($value);
}
}
|
<?php namespace MODX\Shell\Command\User;
use MODX\Shell\Command\ListProcessor;
/**
* List all users for the current modX instance
*/
class GetList extends ListProcessor
{
protected $processor = 'security/user/getlist';
protected $headers = array(
'id', 'username', 'active', 'sudo'
);
protected $name = 'user:list';
protected $description = 'List users';
protected function configure()
{
$version = $this->modx->getVersionData();
if (version_compare($version['full_version'], '2.2.0-pl', '<')) {
$this->headers = array(
'id', 'username', 'active'
);
}
}
/**
* Format the "modUer.active" field as boolean
*
* @param bool $value
*
* @return string
*/
protected function formatActive($value)
{
return $this->renderBoolean($value);
}
/**
* Format the "modUser.sudo" field as boolean
*
* @param bool $value
*
* @return string
*/
protected function formatSudo($value)
{
return $this->renderBoolean($value);
}
}
|
Disable SSL mode in DB connection for now
|
package db
import (
"database/sql"
"fmt"
"github.com/rafaeljusto/cctldstats/config"
)
// Connection database connection.
var Connection *sql.DB
// Connect performs the database connection. Today the following databases are supported: mysql and postgres
func Connect() (err error) {
var connParams string
switch config.CCTLDStats.Database.Kind {
case "mysql":
connParams = fmt.Sprintf("%s:%s@tcp(%s)/%s",
config.CCTLDStats.Database.Username,
config.CCTLDStats.Database.Password,
config.CCTLDStats.Database.Host,
config.CCTLDStats.Database.Name,
)
case "postgres":
connParams = fmt.Sprintf("postgres://%s:%s@%s/%s",
config.CCTLDStats.Database.Username,
config.CCTLDStats.Database.Password,
config.CCTLDStats.Database.Host,
config.CCTLDStats.Database.Name,
)
}
Connection, err = sql.Open(config.CCTLDStats.Database.Kind, connParams)
return
}
|
package db
import (
"database/sql"
"fmt"
"github.com/rafaeljusto/cctldstats/config"
)
// Connection database connection.
var Connection *sql.DB
// Connect performs the database connection. Today the following databases are supported: mysql and postgres
func Connect() (err error) {
var connParams string
switch config.CCTLDStats.Database.Kind {
case "mysql":
connParams = fmt.Sprintf("%s:%s@tcp(%s)/%s",
config.CCTLDStats.Database.Username,
config.CCTLDStats.Database.Password,
config.CCTLDStats.Database.Host,
config.CCTLDStats.Database.Name,
)
case "postgres":
connParams = fmt.Sprintf("postgres://%s:%s@%s/%s?sslmode=verify-full",
config.CCTLDStats.Database.Username,
config.CCTLDStats.Database.Password,
config.CCTLDStats.Database.Host,
config.CCTLDStats.Database.Name,
)
}
Connection, err = sql.Open(config.CCTLDStats.Database.Kind, connParams)
return
}
|
Add back logging for production debug
|
package com.psidox.saddlewoof.web.rest;
import javax.ws.rs.container.ContainerRequestContext;
import javax.ws.rs.container.ContainerRequestFilter;
import javax.ws.rs.container.PreMatching;
import javax.ws.rs.ext.Provider;
import java.io.IOException;
@Provider
@PreMatching
public class Interceptor implements ContainerRequestFilter {
@Override
public void filter(ContainerRequestContext reqContext) throws IOException {
String request = "";
request += String.format("uri: %s %s %s\n", reqContext.getMethod(), reqContext.getUriInfo().getPath(), reqContext.getUriInfo().getQueryParameters().toString());
request += String.format("lang: %s\n", reqContext.getAcceptableLanguages());
request += String.format("media: %s\n", reqContext.getAcceptableMediaTypes());
request += "headers:\n";
for (String key: reqContext.getHeaders().keySet()) {
request += String.format("+ %s: %s\n", key, reqContext.getHeaders().get(key));
}
//request += String.format("body: %s\n", IOUtils.toString(reqContext.getEntityStream()));
System.out.println(request);
}
}
|
package com.psidox.saddlewoof.web.rest;
import javax.ws.rs.container.ContainerRequestContext;
import javax.ws.rs.container.ContainerRequestFilter;
import javax.ws.rs.container.PreMatching;
import javax.ws.rs.ext.Provider;
import java.io.IOException;
@Provider
@PreMatching
public class Interceptor implements ContainerRequestFilter {
@Override
public void filter(ContainerRequestContext reqContext) throws IOException {
// String request = "";
// request += String.format("uri: %s %s %s\n", reqContext.getMethod(), reqContext.getUriInfo().getPath(), reqContext.getUriInfo().getQueryParameters().toString());
// request += String.format("lang: %s\n", reqContext.getAcceptableLanguages());
// request += String.format("media: %s\n", reqContext.getAcceptableMediaTypes());
// request += "headers:\n";
// for (String key: reqContext.getHeaders().keySet()) {
// request += String.format("+ %s: %s\n", key, reqContext.getHeaders().get(key));
// }
// //request += String.format("body: %s\n", IOUtils.toString(reqContext.getEntityStream()));
//
// System.out.println(request);
}
}
|
Add Polyfill to standalone build, bring exports on par with lib build
|
import 'babel/polyfill';
import 'isomorphic-fetch';
// Configuration
export {default as render} from './render';
export {default as configure} from './configure';
export {default as configureRoutes} from './configureRoutes';
export {configureJSXRoutes} from './configureRoutes';
// Components
export {default as Card} from './components/Card/Card';
export {default as Page} from './components/Page/Page';
export {default as Span} from './components/Specimen/Span';
// Higher-order component for creating specimens
export {default as Specimen} from './components/Specimen/Specimen';
export {default as mapSpecimenOption} from './utils/mapSpecimenOption';
// Specimens
export {default as AudioSpecimen} from './specimens/Audio';
export {default as CodeSpecimen} from './specimens/Code';
export {default as ColorSpecimen} from './specimens/Color';
export {default as ColorPaletteSpecimen} from './specimens/ColorPalette';
export {default as HtmlSpecimen} from './specimens/Html';
export {default as HintSpecimen} from './specimens/Hint';
export {default as ImageSpecimen} from './specimens/Image';
export {default as TypeSpecimen} from './specimens/Type';
export {default as DownloadSpecimen} from './specimens/Download';
export {default as VideoSpecimen} from './specimens/Video';
|
import 'isomorphic-fetch';
// Configuration
export {default as render} from './render';
export {default as configure} from './configure';
export {default as configureRoutes} from './configureRoutes';
export {configureJSXRoutes} from './configureRoutes';
// Components
export {default as Card} from './components/Page/Page';
export {default as Page} from './components/Page/Page';
export {default as Span} from './components/Specimen/Span';
// Higher-order component for creating specimens
export {default as Specimen} from './components/Specimen/Specimen';
// Specimens
export {default as AudioSpecimen} from './specimens/Audio';
export {default as CodeSpecimen} from './specimens/Code';
export {default as ColorSpecimen} from './specimens/Color';
export {default as ColorPaletteSpecimen} from './specimens/ColorPalette';
export {default as HtmlSpecimen} from './specimens/Html';
export {default as HintSpecimen} from './specimens/Hint';
export {default as ImageSpecimen} from './specimens/Image';
export {default as TypeSpecimen} from './specimens/Type';
export {default as DownloadSpecimen} from './specimens/Download';
export {default as VideoSpecimen} from './specimens/Video';
|
Add tests for correct class name which ignores non-php files.
Is this code necessary since we should be ignoring non-php files already?
|
<?php
namespace li3_quality\tests\cases\test\rules;
class HasCorrectClassNameTest extends \li3_quality\test\Unit {
public $rule = 'li3_quality\test\rules\HasCorrectClassName';
public function testWithClosingTag() {
$code = <<<EOD
namespace bar/baz;
class FooBar {}
EOD;
$this->assertRulePass(array(
'source' => $code,
'path' => '/bar/baz/FooBar.php',
), $this->rule);
}
public function testWithoutClosingTag() {
$code = <<<EOD
namespace bar/baz;
class foobar {}
EOD;
$this->assertRuleFail(array(
'source' => $code,
'path' => '/bar/baz/foobar.php',
), $this->rule);
}
public function testWithoutMatchingFileName() {
$code = <<<EOD
namespace bar/baz;
class FooBar {}
EOD;
$this->assertRuleFail(array(
'source' => $code,
'path' => '/bar/baz/foo.php',
), $this->rule);
}
public function testNonPHPFile() {
$code = <<<EOD
namespace bar/baz;
class FooBar {}
EOD;
$this->assertRulePass(array(
'source' => $code,
'path' => '/bar/baz/foo.txt',
), $this->rule);
}
}
?>
|
<?php
namespace li3_quality\tests\cases\test\rules;
class HasCorrectClassNameTest extends \li3_quality\test\Unit {
public $rule = 'li3_quality\test\rules\HasCorrectClassName';
public function testWithClosingTag() {
$code = <<<EOD
namespace bar/baz;
class FooBar {}
EOD;
$this->assertRulePass(array(
'source' => $code,
'path' => '/bar/baz/FooBar.php',
), $this->rule);
}
public function testWithoutClosingTag() {
$code = <<<EOD
namespace bar/baz;
class foobar {}
EOD;
$this->assertRuleFail(array(
'source' => $code,
'path' => '/bar/baz/foobar.php',
), $this->rule);
}
public function testWithoutMatchingFileName() {
$code = <<<EOD
namespace bar/baz;
class FooBar {}
EOD;
$this->assertRuleFail(array(
'source' => $code,
'path' => '/bar/baz/foo.php',
), $this->rule);
}
}
?>
|
Add tests to make sure each rule has a recommended setting and a description
|
import fs from 'fs';
import test from 'ava';
import pify from 'pify';
import index from '../';
test('every rule should defined in the index file and recommended settings', async t => {
const files = await pify(fs.readdir, Promise)('../rules/');
const rules = files.filter(file => file.indexOf('.js') === file.length - 3);
rules.forEach(file => {
const name = file.slice(0, -3);
t.truthy(index.rules[name], `'${name}' is not exported in 'index.js'`);
t.truthy(index.rules[name].meta.docs.description, `'${name}' does not have a description`);
t.truthy(index.rules[name].meta.docs.recommended, `'${name}' does not have a recommended setting`);
t.truthy(index.configs.recommended.rules[`fp/${name}`], `'${name}' is not set in the recommended config`);
});
t.is(Object.keys(index.rules).length, rules.length,
'There are more exported rules than rule files.');
});
test('no-var should be turned on in the recommended settings', async t => {
t.true(index.configs.recommended.rules['no-var'] === 'error');
});
|
import fs from 'fs';
import test from 'ava';
import pify from 'pify';
import index from '../';
test('every rule should defined in the index file and recommended settings', async t => {
const files = await pify(fs.readdir, Promise)('../rules/');
const rules = files.filter(file => file.indexOf('.js') === file.length - 3);
rules.forEach(file => {
const name = file.slice(0, -3);
t.truthy(index.rules[name], `'${name}' is not exported in 'index.js'`);
t.truthy(index.configs.recommended.rules[`fp/${name}`], `'${name}' is not set in the recommended config`);
});
t.is(Object.keys(index.rules).length, rules.length,
'There are more exported rules than rule files.');
});
test('no-var should be turned on in the recommended settings', async t => {
t.true(index.configs.recommended.rules['no-var'] === 'error');
});
|
Make output even more minimal.
|
from collections import OrderedDict
from server import prepare_data, query_server
from parser import parse_response
from bottle import route, request, run, view, JSONPlugin, json_dumps as dumps
from functools import partial
import bottle
bottle.TEMPLATE_PATH = ["api/views/"]
bottle.debug(True)
bottle.TEMPLATES.clear()
better_dumps = partial(dumps, separators=(',', ':'))
bottle.default_app().uninstall(JSONPlugin)
bottle.default_app().install(JSONPlugin(better_dumps))
@route('/api/')
@view('index')
def index():
site = "%s://%s" % (request.urlparts.scheme, request.urlparts.netloc)
return {"site": site}
@route('/api/tag', method=["get", "post"])
def tag():
data = request.POST.get("data", None)
if not data:
data = request.body.getvalue()
if not data:
return {"error": "No data posted"}
data = prepare_data(data)
response = query_server(data)
sentences, entities = parse_response(response)
return OrderedDict([
("sentences", sentences),
("entities", entities),
])
run(host='localhost', port=8000, reloader=True)
|
from collections import OrderedDict
from server import prepare_data, query_server
from parser import parse_response
from bottle import route, request, run, view
import bottle
bottle.TEMPLATE_PATH = ["api/views/"]
bottle.debug(True)
bottle.TEMPLATES.clear()
@route('/api/')
@view('index')
def index():
site = "%s://%s" % (request.urlparts.scheme, request.urlparts.netloc)
return {"site": site}
@route('/api/tag', method=["get", "post"])
def tag():
data = request.POST.get("data", None)
if not data:
data = request.body.getvalue()
if not data:
return {"error": "No data posted"}
data = prepare_data(data)
response = query_server(data)
sentences, entities = parse_response(response)
return OrderedDict([
("sentences", sentences),
("entities", entities),
])
run(host='localhost', port=8000, reloader=True)
|
Remove dirties context of testcontainerbase
|
package org.synyx.urlaubsverwaltung;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.DynamicPropertyRegistry;
import org.springframework.test.context.DynamicPropertySource;
import org.testcontainers.containers.MariaDBContainer;
import static org.testcontainers.containers.MariaDBContainer.NAME;
public abstract class TestContainersBase {
static final MariaDBContainer<?> mariaDB = new MariaDBContainer<>(NAME + ":10.5");
@DynamicPropertySource
static void mariaDBProperties(DynamicPropertyRegistry registry) {
mariaDB.start();
registry.add("spring.datasource.url", mariaDB::getJdbcUrl);
registry.add("spring.datasource.username", mariaDB::getUsername);
registry.add("spring.datasource.password", mariaDB::getPassword);
}
}
|
package org.synyx.urlaubsverwaltung;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.DynamicPropertyRegistry;
import org.springframework.test.context.DynamicPropertySource;
import org.testcontainers.containers.MariaDBContainer;
import static org.testcontainers.containers.MariaDBContainer.NAME;
@DirtiesContext
public abstract class TestContainersBase {
static final MariaDBContainer<?> mariaDB = new MariaDBContainer<>(NAME + ":10.5");
@DynamicPropertySource
static void mariaDBProperties(DynamicPropertyRegistry registry) {
mariaDB.start();
registry.add("spring.datasource.url", mariaDB::getJdbcUrl);
registry.add("spring.datasource.username", mariaDB::getUsername);
registry.add("spring.datasource.password", mariaDB::getPassword);
}
}
|
Add support for new ValueRestriction QueryTagValues parameter in HavenSearchComponents (FIND-975)
Add new parameters for GetQueryTagValues
[rev. matthew.gordon]
|
/*
* Copyright 2015-2016 Hewlett-Packard Development Company, L.P.
* Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
*/
package com.hp.autonomy.types.requests.idol.actions.tags.params;
public enum GetQueryTagValuesParams {
AllowNonParametricFields,
CustomWeight,
DateOffset,
DatePeriod,
DocumentCount,
FieldDependence,
FieldDependenceMultiLevel,
FieldName,
MaxValues,
Merge,
Predict,
Ranges,
RestrictedValues,
Sort,
Start,
Synchronous,
TotalValues,
ValueDetails,
ValuePercentiles,
ValueRestriction;
public static GetQueryTagValuesParams fromValue(final String value) {
for (final GetQueryTagValuesParams param : values()) {
if (param.name().equalsIgnoreCase(value)) {
return param;
}
}
throw new IllegalArgumentException("Unknown parameter " + value);
}
}
|
/*
* Copyright 2015-2016 Hewlett-Packard Development Company, L.P.
* Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
*/
package com.hp.autonomy.types.requests.idol.actions.tags.params;
public enum GetQueryTagValuesParams {
AllowNonParametricFields,
DateOffset,
DatePeriod,
DocumentCount,
FieldDependence,
FieldDependenceMultiLevel,
FieldName,
MaxValues,
Merge,
Ranges,
RestrictedValues,
Sort,
Start,
Synchronous,
TotalValues,
ValueDetails;
public static GetQueryTagValuesParams fromValue(final String value) {
for (final GetQueryTagValuesParams param : values()) {
if (param.name().equalsIgnoreCase(value)) {
return param;
}
}
throw new IllegalArgumentException("Unknown parameter " + value);
}
}
|
Set text to help menu.
|
package com.github.aureliano.edocs.app.gui.menu.help;
import javax.swing.JMenu;
import javax.swing.event.MenuEvent;
import javax.swing.event.MenuListener;
import com.github.aureliano.edocs.app.model.IDatabaseConnectionDependent;
import com.github.aureliano.edocs.common.locale.EdocsLocale;
public class HelpMenu extends JMenu implements IDatabaseConnectionDependent {
private static final long serialVersionUID = -3275074190970882751L;
private LicenseMenuItem licenseMenuItem;
private AboutMenuItem aboutMenuItem;
public HelpMenu() {
super.setText(EdocsLocale.instance().getMessage("gui.menubar.help"));
this.addMenuItems();
}
private void addMenuItems() {
this.licenseMenuItem = new LicenseMenuItem();
this.aboutMenuItem = new AboutMenuItem();
super.add(this.licenseMenuItem);
super.add(this.aboutMenuItem);
super.addMenuListener(new MenuListener() {
@Override
public void menuSelected(MenuEvent e) {
licenseMenuItem.setMenuItemAvailability();
aboutMenuItem.setMenuItemAvailability();
}
@Override
public void menuDeselected(MenuEvent e) {}
@Override
public void menuCanceled(MenuEvent e) {}
});
}
@Override
public void setDatabaseGuiEnabled(boolean enabled) {}
}
|
package com.github.aureliano.edocs.app.gui.menu.help;
import javax.swing.JMenu;
import javax.swing.event.MenuEvent;
import javax.swing.event.MenuListener;
import com.github.aureliano.edocs.app.model.IDatabaseConnectionDependent;
import com.github.aureliano.edocs.common.locale.EdocsLocale;
public class HelpMenu extends JMenu implements IDatabaseConnectionDependent {
private static final long serialVersionUID = -3275074190970882751L;
private LicenseMenuItem licenseMenuItem;
private AboutMenuItem aboutMenuItem;
public HelpMenu() {
super.setText(EdocsLocale.instance().getMessage(""));
this.addMenuItems();
}
private void addMenuItems() {
this.licenseMenuItem = new LicenseMenuItem();
this.aboutMenuItem = new AboutMenuItem();
super.add(this.licenseMenuItem);
super.add(this.aboutMenuItem);
super.addMenuListener(new MenuListener() {
@Override
public void menuSelected(MenuEvent e) {
licenseMenuItem.setMenuItemAvailability();
aboutMenuItem.setMenuItemAvailability();
}
@Override
public void menuDeselected(MenuEvent e) {}
@Override
public void menuCanceled(MenuEvent e) {}
});
}
@Override
public void setDatabaseGuiEnabled(boolean enabled) {}
}
|
Fix legacy use of action result
|
#!/usr/bin/env python
#
# Print the list of sysdig captures.
#
import os
import sys
sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])), '..'))
from sdcclient import SdcClient
#
# Parse arguments
#
if len(sys.argv) != 2:
print('usage: %s <sysdig-token>' % sys.argv[0])
print('You can find your token at https://app.sysdigcloud.com/#/settings/user')
sys.exit(1)
sdc_token = sys.argv[1]
#
# Instantiate the SDC client
#
sdclient = SdcClient(sdc_token)
#
# Fire the request.
#
ok, res = sdclient.get_sysdig_captures()
#
# Show the list of metrics
#
if ok:
captures = res['dumps']
else:
print(res)
sys.exit(1)
for capture in captures:
print("Folder %s, Name %s, Host: %s, Size: %d, Status: %s" %
(capture['folder'], capture['name'], capture['agent']['hostName'], capture['size'], capture['status']))
|
#!/usr/bin/env python
#
# Print the list of sysdig captures.
#
import os
import sys
sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])), '..'))
from sdcclient import SdcClient
#
# Parse arguments
#
if len(sys.argv) != 2:
print('usage: %s <sysdig-token>' % sys.argv[0])
print('You can find your token at https://app.sysdigcloud.com/#/settings/user')
sys.exit(1)
sdc_token = sys.argv[1]
#
# Instantiate the SDC client
#
sdclient = SdcClient(sdc_token)
#
# Fire the request.
#
ok, res = sdclient.get_sysdig_captures()
#
# Show the list of metrics
#
if ok:
captures = res[1]['dumps']
else:
print(res)
sys.exit(1)
for capture in captures:
print("Folder %s, Name %s, Host: %s, Size: %d, Status: %s" %
(capture['folder'], capture['name'], capture['agent']['hostName'], capture['size'], capture['status']))
|
Remove dots when small screen
|
const settings = {
autoplay: true,
infinite: true,
dots: true,
speed: 500,
pauseOnHover: true,
responsive: [
{
breakpoint: 100000,
settings: {
slidesToShow: 5,
slidesToScroll: 5,
infinite: true,
dots: true
}
},
{
breakpoint: 1500,
settings: {
slidesToShow: 3,
slidesToScroll: 3
}
},
{
breakpoint: 800,
settings: {
slidesToShow: 2,
slidesToScroll: 2,
initialSlide: 2
}
},
{
breakpoint: 480,
settings: {
slidesToShow: 1,
slidesToScroll: 1,
dots: false,
arrows: true
}
}
]
}
export { settings }
|
const settings = {
autoplay: true,
infinite: true,
dots: true,
speed: 500,
pauseOnHover: true,
responsive: [
{
breakpoint: 100000,
settings: {
slidesToShow: 5,
slidesToScroll: 5,
infinite: true,
dots: true
}
},
{
breakpoint: 1500,
settings: {
slidesToShow: 3,
slidesToScroll: 3
}
},
{
breakpoint: 800,
settings: {
slidesToShow: 2,
slidesToScroll: 2,
initialSlide: 2
}
},
{
breakpoint: 480,
settings: {
slidesToShow: 1,
slidesToScroll: 1
}
}
]
}
export { settings }
|
Add new logging style logs
|
package main
import (
"encoding/json"
"fmt"
"net/http"
)
var taskIndex = 0
func startHTTP() {
http.HandleFunc("/api/addtask", addTaskHandler)
log.Info.Printf("API listening on port %d", *port)
if err := http.ListenAndServe(fmt.Sprintf(":%d", *port), nil); err != nil {
log.Error.Fatalf("Failed to start listening on port %d", *port)
}
}
func addTaskHandler(w http.ResponseWriter, r *http.Request) {
if r.Method != "POST" {
w.Header().Add("Allow", "POST")
w.WriteHeader(http.StatusMethodNotAllowed)
log.Error.Printf("Received addtask request with unexpected method. want %q, got %q: %+v", "POST", r.Method, r)
}
defer r.Body.Close()
var task Task
err := json.NewDecoder(r.Body).Decode(&task)
if err != nil {
log.Error.Printf("Failed to parse JSON body from addtask request %+v: %+v", r, err)
// TODO(dhamon): Better error for this case.
w.WriteHeader(http.StatusInternalServerError)
return
}
if len(task.Id) == 0 {
task.Id = fmt.Sprintf("gozer-task-%d", taskIndex)
taskIndex += 1
}
taskstore.Add(&task)
w.WriteHeader(http.StatusOK)
}
|
package main
import (
"encoding/json"
"fmt"
"log"
"net/http"
)
var taskIndex = 0
func startHTTP() {
http.HandleFunc("/api/addtask", addTaskHandler)
log.Printf("api listening on port %d", *port)
if err := http.ListenAndServe(fmt.Sprintf(":%d", *port), nil); err != nil {
log.Fatalf("failed to start listening on port %d", *port)
}
}
func addTaskHandler(w http.ResponseWriter, r *http.Request) {
if r.Method != "POST" {
w.Header().Add("Allow", "POST")
w.WriteHeader(http.StatusMethodNotAllowed)
log.Printf("received addtask request with unexpected method. want %q, got %q: %+v", "POST", r.Method, r)
}
defer r.Body.Close()
var task Task
err := json.NewDecoder(r.Body).Decode(&task)
if err != nil {
log.Printf("ERROR: failed to parse JSON body from addtask request %+v: %+v", r, err)
// TODO(dhamon): Better error for this case.
w.WriteHeader(http.StatusInternalServerError)
return
}
if len(task.Id) == 0 {
task.Id = fmt.Sprintf("gozer-task-%d", taskIndex)
taskIndex += 1
}
taskstore.Add(&task)
w.WriteHeader(http.StatusOK)
}
|
Add option to define amount of gas sent in post request
|
const contractHelper = require('../contracts/contractHelpers.js');
const web3Connection = require('../web3.js');
const loggers = require('../loggers/events.js');
const web3 = web3Connection.web3;
const buySvc = {
buyTicket: (req, res) => {
const contractAddress = req.body.contractAddress; // address of deployed contract;
const fromAddress = req.body.fromAddress;
const name = req.body.name;
const eventContractInstance = web3.eth.contract(contractHelper.contractObj).at(contractAddress);
const opts = {
from: fromAddress,
value: req.body.price,
};
if (req.body.gas) {
opts.gas = req.body.gas;
}
eventContractInstance.buyTicket(name, opts, (err) => {
if (err) {
console.log(err);
loggers(eventContractInstance).ExceedQuota();
loggers(eventContractInstance).InsufficientEther();
res.sendStatus(500);
} else {
loggers(eventContractInstance).PurchaseTicket((error, result) => {
res.status(200).send(`Number of attendees: ${result.args._numAttendees.toString()}`);
});
}
});
},
};
module.exports = buySvc;
|
const contractHelper = require('../contracts/contractHelpers.js');
const web3Connection = require('../web3.js');
const loggers = require('../loggers/events.js');
const web3 = web3Connection.web3;
const buySvc = {
buyTicket: (req, res) => {
const contractAddress = req.body.contractAddress; // address of deployed contract;
const fromAddress = req.body.fromAddress;
const name = req.body.name;
const eventContractInstance = web3.eth.contract(contractHelper.contractObj).at(contractAddress);
eventContractInstance.buyTicket(name, {
from: fromAddress,
value: req.body.price,
}, (err) => {
if (err) {
console.log(err);
loggers(eventContractInstance).ExceedQuota();
loggers(eventContractInstance).InsufficientEther();
res.sendStatus(500);
} else {
loggers(eventContractInstance).PurchaseTicket((error, result) => {
res.status(200).send(`Number of attendees: ${result.args._numAttendees.toString()}`);
});
}
});
},
};
module.exports = buySvc;
|
Change lifetime to 1 week
|
'use strict';
var Memcached = require('memcached'),
replify = require('replify'),
Promise = require('rsvp').Promise;
module.exports = function(connection) {
var memcachedstore = new Memcached(connection);
var API = {
get: function(id){
return new Promise(function(resolve, reject) {
memcachedstore.get(id, function(err, val) {
if (err || val === false) {
reject(err);
} else {
resolve(val);
}
});
});
},
set: function(id, val, lifetime) {
lifetime = lifetime || 7 * 24 * 60 * 60 * 1000;
return new Promise(function(resolve, reject) {
memcachedstore.set(id, val, lifetime, function(err) {
if (err) {
reject(err);
} else {
resolve(val);
}
});
});
},
items: memcachedstore.items.bind(memcachedstore),
};
replify('memcache', API);
return API;
};
|
'use strict';
var Memcached = require('memcached'),
replify = require('replify'),
Promise = require('rsvp').Promise;
module.exports = function(connection) {
var memcachedstore = new Memcached(connection);
var API = {
get: function(id){
return new Promise(function(resolve, reject) {
memcachedstore.get(id, function(err, val) {
if (err || val === false) {
reject(err);
} else {
resolve(val);
}
});
});
},
set: function(id, val, lifetime) {
lifetime = lifetime || 365 * 24 * 60 * 60 * 1000;
return new Promise(function(resolve, reject) {
memcachedstore.set(id, val, lifetime, function(err) {
if (err) {
reject(err);
} else {
resolve(val);
}
});
});
},
items: memcachedstore.items.bind(memcachedstore),
};
replify('memcache', API);
return API;
};
|
Prepare development of new version.
|
# -*- coding: utf-8 -*-
# Copyright © 2009 Ben Finney <ben+python@benfinney.id.au>
# Copyright © 2006 Robert Niederreiter
#
# This is free software: you may copy, modify, and/or distribute this work
# under the terms of the Python Software Foundation License, version 2 or
# later as published by the Python Software Foundation.
# No warranty expressed or implied. See the file LICENSE.PSF-2 for details.
""" Library to implement a well-behaved Unix daemon process
This library implements PEP 3143: Standard daemon process library.
A well-behaved Unix daemon process is tricky to get right, but the
required steps are much the same for every daemon program. A
`DaemonContext` instance holds the behaviour and configured
process environment for the program; use the instance as a context
manager to enter a daemon state.
Simple example of usage::
import daemon
from spam import do_main_program
with daemon.DaemonContext():
do_main_program()
"""
from daemon import DaemonContext
version = "1.4.5"
|
# -*- coding: utf-8 -*-
# Copyright © 2009 Ben Finney <ben+python@benfinney.id.au>
# Copyright © 2006 Robert Niederreiter
#
# This is free software: you may copy, modify, and/or distribute this work
# under the terms of the Python Software Foundation License, version 2 or
# later as published by the Python Software Foundation.
# No warranty expressed or implied. See the file LICENSE.PSF-2 for details.
""" Library to implement a well-behaved Unix daemon process
This library implements PEP 3143: Standard daemon process library.
A well-behaved Unix daemon process is tricky to get right, but the
required steps are much the same for every daemon program. A
`DaemonContext` instance holds the behaviour and configured
process environment for the program; use the instance as a context
manager to enter a daemon state.
Simple example of usage::
import daemon
from spam import do_main_program
with daemon.DaemonContext():
do_main_program()
"""
from daemon import DaemonContext
version = "1.4.4"
|
Add spinner when remote markdown is loading
|
import React from 'react';
import {fetchMarkdown} from './client.js';
import marked from './marked';
import externalLinkSetup from './externalLinkSetup.js';
export default class About extends React.Component {
constructor() {
super();
this.state = {
markdownText: ''
};
}
get pageTitle() {
return {
'/blog/filters': 'Blog Filters'
}[window.location.pathname];
}
fetch() {
let src = `${window.location.pathname.substring(1)}.markdown`;
(this.props.fetchFunc || fetchMarkdown)(this.props.src || src).then((markdownText) =>
this.setState({markdownText}));
}
componentWillReceiveProps() {
this.fetch();
}
componentWillMount() {
this.fetch();
}
componentDidUpdate() {
externalLinkSetup(React.findDOMNode(this.refs.markdownContainer));
}
render() {
// Add progress indicator on first load
if (!this.state.markdownText) {
return <span className="fa fa-circle-o-notch fa-spin"/>;
}
return <div>
<div ref='markdownContainer' dangerouslySetInnerHTML={
{__html: marked(this.state.markdownText)}}/></div>;
}
}
|
import React from 'react';
import {fetchMarkdown} from './client.js';
import marked from './marked';
import externalLinkSetup from './externalLinkSetup.js';
export default class About extends React.Component {
constructor() {
super();
this.state = {
markdownText: ''
};
}
get pageTitle() {
return {
'/blog/filters': 'Blog Filters'
}[window.location.pathname];
}
fetch() {
let src = `${window.location.pathname.substring(1)}.markdown`;
(this.props.fetchFunc || fetchMarkdown)(this.props.src || src).then((markdownText) =>
this.setState({markdownText}));
}
componentWillReceiveProps() {
this.fetch();
}
componentWillMount() {
this.fetch();
}
componentDidUpdate() {
externalLinkSetup(React.findDOMNode(this.refs.markdownContainer));
}
render() {
return <div>
<div ref='markdownContainer' dangerouslySetInnerHTML={
{__html: marked(this.state.markdownText)}}/></div>;
}
}
|
Fix for duplicat field in Json serialisation.
|
package com.github.onsdigital.json.release;
import com.github.onsdigital.json.ContentType;
import com.github.onsdigital.json.Reference;
import com.github.onsdigital.json.dataset.Dataset;
import com.github.onsdigital.json.markdown.Article;
import com.github.onsdigital.json.markdown.Bulletin;
/**
* Represents a section in a release
*/
public class ReleaseSection extends Reference {
/**
* Creates an {@link Article} reference.
*
* @param article
* The item to be referenced.
*/
public ReleaseSection(Article article) {
super(article);
this.type = ContentType.article;
}
/**
* Creates a {@link Bulletin} reference.
*
* @param bulletin
* The item to be referenced.
*/
public ReleaseSection(Bulletin bulletin) {
super(bulletin);
this.type = ContentType.bulletin;
}
/**
* Creates a {@link Dataset} reference.
*
* @param dataset
* The item to be referenced.
*/
public ReleaseSection(Dataset dataset) {
super(dataset);
this.type = ContentType.dataset;
}
}
|
package com.github.onsdigital.json.release;
import com.github.onsdigital.json.ContentType;
import com.github.onsdigital.json.Reference;
import com.github.onsdigital.json.dataset.Dataset;
import com.github.onsdigital.json.markdown.Article;
import com.github.onsdigital.json.markdown.Bulletin;
/**
* Represents a section in a release
*/
public class ReleaseSection extends Reference {
public ContentType type;
/**
* Creates an {@link Article} reference.
*
* @param article
* The item to be referenced.
*/
public ReleaseSection(Article article) {
super(article);
this.type = ContentType.article;
}
/**
* Creates a {@link Bulletin} reference.
*
* @param bulletin
* The item to be referenced.
*/
public ReleaseSection(Bulletin bulletin) {
super(bulletin);
this.type = ContentType.bulletin;
}
/**
* Creates a {@link Dataset} reference.
*
* @param dataset
* The item to be referenced.
*/
public ReleaseSection(Dataset dataset) {
super(dataset);
this.type = ContentType.dataset;
}
}
|
Use System.import until jest supports it
|
import Definition, { asyncComponent } from 'hippo/screens/definition';
import Group from 'hippo/screens/group';
<% Hippo::Screen.each_group do | group | %>
Group.register( <%= group.to_json %> );
<% end -%>
const Screens = {};
<% Hippo::Screen.each do | screen | -%>
Screens['<%= screen.identifier %>'] = <%= screen.to_json %>;
Definition.register(
Screens['<%= screen.identifier %>'],
asyncComponent({
screen: <%= screen.to_json %>,
resolve: () => System.import(<%= "'#{screen.asset_path}'" %>)
}),
);
<% end -%>
<% if Hippo::Screen.enabled_group_ids %>
Group.enabled_group_ids = [<%= Hippo::Screen.enabled_group_ids.map{ |gid| "'#{gid}'" }.join(',') %>];
<% end %>
|
import Definition, { asyncComponent } from 'hippo/screens/definition';
import Group from 'hippo/screens/group';
<% Hippo::Screen.each_group do | group | %>
Group.register( <%= group.to_json %> );
<% end -%>
const Screens = {};
<% Hippo::Screen.each do | screen | -%>
Screens['<%= screen.identifier %>'] = <%= screen.to_json %>;
Definition.register(
Screens['<%= screen.identifier %>'],
asyncComponent({
screen: <%= screen.to_json %>,
resolve: () => import(<%= "'#{screen.asset_path}'" %>)
}),
);
<% end -%>
<% if Hippo::Screen.enabled_group_ids %>
Group.enabled_group_ids = [<%= Hippo::Screen.enabled_group_ids.map{ |gid| "'#{gid}'" }.join(',') %>];
<% end %>
|
Prepend library/ to the include_path to help prevent testing other copies of the framework.
git-svn-id: b6e219894c353fb1d215c78b2075057d8daadfba@604 44c647ce-9c0f-0410-b52a-842ac1e357ba
|
<?php
if (!defined('PHPUnit2_MAIN_METHOD')) {
define('PHPUnit2_MAIN_METHOD', 'AllTests::main');
}
require_once 'PHPUnit2/Framework/TestSuite.php';
require_once 'PHPUnit2/TextUI/TestRunner.php';
/**
* Read in user-defined test configuration if available; otherwise, read default test configuration
*/
if (is_readable('TestConfiguration.php')) {
require_once 'TestConfiguration.php';
} else {
require_once 'TestConfiguration.php.dist';
}
/**
* Prepend library/ to the include_path. This allows the tests to run out of the box and
* helps prevent finding other copies of the framework that might be present.
*/
set_include_path(dirname(dirname(__FILE__)) . DIRECTORY_SEPARATOR . 'library'
. PATH_SEPARATOR . get_include_path());
require_once 'ZendTest.php';
require_once 'Zend/AllTests.php';
class AllTests
{
public static function main()
{
PHPUnit2_TextUI_TestRunner::run(self::suite());
}
public static function suite()
{
$suite = new PHPUnit2_Framework_TestSuite('Zend Framework');
$suite->addTestSuite('ZendTest');
$suite->addTest(Zend_AllTests::suite());
return $suite;
}
}
if (PHPUnit2_MAIN_METHOD == 'AllTests::main') {
AllTests::main();
}
|
<?php
if (!defined('PHPUnit2_MAIN_METHOD')) {
define('PHPUnit2_MAIN_METHOD', 'AllTests::main');
}
require_once 'PHPUnit2/Framework/TestSuite.php';
require_once 'PHPUnit2/TextUI/TestRunner.php';
/**
* Read in user-defined test configuration if available; otherwise, read default test configuration
*/
if (is_readable('TestConfiguration.php')) {
require_once 'TestConfiguration.php';
} else {
require_once 'TestConfiguration.php.dist';
}
/**
* Append library/ to the include_path. This allows the tests to run out of the box and
* helps prevent finding other copies of the framework that might be present.
*/
set_include_path(get_include_path() . PATH_SEPARATOR
. dirname(dirname(__FILE__)) . DIRECTORY_SEPARATOR . 'library');
require_once 'ZendTest.php';
require_once 'Zend/AllTests.php';
class AllTests
{
public static function main()
{
PHPUnit2_TextUI_TestRunner::run(self::suite());
}
public static function suite()
{
$suite = new PHPUnit2_Framework_TestSuite('Zend Framework');
$suite->addTestSuite('ZendTest');
$suite->addTest(Zend_AllTests::suite());
return $suite;
}
}
if (PHPUnit2_MAIN_METHOD == 'AllTests::main') {
AllTests::main();
}
|
Add log message when running Gulp in development mode
|
var gulp = require('gulp');
var less = require('gulp-less');
var browserify = require('browserify');
var LessPluginCleanCSS = require('less-plugin-clean-css');
var cleancss = new LessPluginCleanCSS({advanced: true});
var source = require('vinyl-source-stream');
var uglify = require('gulp-uglify');
var streamify = require('gulp-streamify');
var gutil = require('gulp-util');
if (gutil.env.devel) {
gutil.log('Building in development mode');
}
gulp.task('browserify', function() {
browserify({
entries: ['./src/main/resources/static/js/tailstreamer.js'],
debug: true
})
.bundle()
.pipe(source('tailstreamer.js'))
.pipe(gutil.env.devel ? gutil.noop() : streamify(uglify()))
.pipe(gulp.dest('build/resources/main/static/js'));
});
gulp.task('less', function() {
gulp.src('src/main/resources/static/less/style.less')
.pipe(less({
plugins: [cleancss]
}))
.pipe(gulp.dest('build/resources/main/static/css'))
});
gulp.task('default', ['browserify', 'less']);
gulp.task('watch', function() {
gulp.watch('src/main/resources/static/js/*.js', ['browserify']);
gulp.watch('src/main/resources/static/less/*.less', ['less']);
});
|
var gulp = require('gulp');
var less = require('gulp-less');
var browserify = require('browserify');
var LessPluginCleanCSS = require('less-plugin-clean-css');
var cleancss = new LessPluginCleanCSS({advanced: true});
var source = require('vinyl-source-stream');
var uglify = require('gulp-uglify');
var streamify = require('gulp-streamify');
var gutil = require('gulp-util');
gulp.task('browserify', function() {
browserify({
entries: ['./src/main/resources/static/js/tailstreamer.js'],
debug: true
})
.bundle()
.pipe(source('tailstreamer.js'))
.pipe(gutil.env.devel ? gutil.noop() : streamify(uglify()))
.pipe(gulp.dest('build/resources/main/static/js'));
});
gulp.task('less', function() {
gulp.src('src/main/resources/static/less/style.less')
.pipe(less({
plugins: [cleancss]
}))
.pipe(gulp.dest('build/resources/main/static/css'))
});
gulp.task('default', ['browserify', 'less']);
gulp.task('watch', function() {
gulp.watch('src/main/resources/static/js/*.js', ['browserify']);
gulp.watch('src/main/resources/static/less/*.less', ['less']);
});
|
Clean up Ember.Application ext tests
|
/**
These tests ensure that Ember Data works with Ember.js' application
initialization and dependency injection APIs.
*/
var app;
module("Ember.Application Extensions", {
setup: function() {
var Router = Ember.Router.extend({
root: Ember.Route.extend()
});
Ember.run(function() {
app = Ember.Application.create({
Router: Router,
Store: DS.Store,
FooController: Ember.Controller.extend(),
ApplicationView: Ember.View.extend(),
ApplicationController: Ember.View.extend()
});
});
},
teardown: function() {
app.destroy();
}
});
test("If a Store property exists on an Ember.Application, it should be instantiated.", function() {
app.initialize();
ok(app.getPath('router.store') instanceof DS.Store, "the store was injected");
});
test("If a store is instantiated, it should be made available to each controller.", function() {
app.initialize();
ok(app.getPath('router.fooController.store') instanceof DS.Store, "the store was injected");
});
|
/**
These tests ensure that Ember Data works with Ember.js' application
initialization and dependency injection APIs.
*/
var app;
module("Ember.Application extensions", {
setup: function() {
var Router = Ember.Router.extend({
root: Ember.Route.extend()
});
Ember.run(function() {
app = Ember.Application.create({
Router: Router,
Store: DS.Store,
FooController: Ember.Controller.extend(),
ApplicationView: Ember.View.extend(),
ApplicationController: Ember.View.extend()
});
});
},
teardown: function() {
app.destroy();
}
});
test("it should inject a store instance into the router", function() {
app.initialize();
ok(app.getPath('router.store') instanceof DS.Store, "the store was injected");
});
test("it should inject the store into instantiated controllers", function() {
app.initialize();
ok(app.getPath('router.fooController.store') instanceof DS.Store, "the store was injected");
});
|
Put the script loading in test in a try/catch because it was causing an error in certain cases with Opera.
|
var loadScripts = function(js_files, onComplete){
var len = js_files.length;
var head = document.getElementsByTagName('head')[0];
function loadScript(index){
if (index >= len){
onComplete();
return;
}
try {
testOk = js_files[index].test();
} catch (e) {
// with certain browsers like opera the above test can fail
// because of undefined variables...
testOk = true;
}
if (testOk) {
var s = document.createElement('script');
s.src = js_files[index].src;
s.type = 'text/javascript';
head.appendChild(s);
s.onload = function(){
loadScript(index+1);
}
} else {
loadScript(index+1);
}
}
loadScript(0);
}
|
var loadScripts = function(js_files, onComplete){
var len = js_files.length;
var head = document.getElementsByTagName('head')[0];
function loadScript(index){
if (index >= len){
onComplete();
return;
}
if (js_files[index].test()){
// console.log('Loading ' + js_files[index].src);
var s = document.createElement('script');
s.src = js_files[index].src;
s.type = 'text/javascript';
head.appendChild(s);
s.onload = function(){
loadScript(index+1);
}
}
else{
loadScript(index+1);
}
}
loadScript(0);
}
|
Update migration to overwrite existing destination table.
I think migrations were failing because a "lookup" would end up creating
the destination table in the QA/Thor database, and then trying to use
the `renameCollection` method would silently fail because the table
already existed.
See:
https://docs.mongodb.org/manual/reference/method/db.collection.renameCollection/
|
<?php
use Illuminate\Database\Migrations\Migration;
class RenameClientTable extends Migration
{
/**
* The raw MongoDB interface.
* @var MongoDB
*/
protected $mongodb;
public function __construct()
{
$this->mongodb = app('db')->getMongoDB();
}
/**
* Run the migrations.
*
* @return void
*/
public function up()
{
// The 'jenssegers/laravel-mongodb' package doesn't support Schema::rename so...
$this->mongodb->execute('db.api_keys.renameCollection("clients", true);');
}
/**
* Reverse the migrations.
*
* @return void
*/
public function down()
{
$this->mongodb->execute('db.clients.renameCollection("api_keys", true);');
}
}
|
<?php
use Illuminate\Database\Migrations\Migration;
class RenameClientTable extends Migration
{
/**
* The raw MongoDB interface.
* @var MongoDB
*/
protected $mongodb;
public function __construct()
{
$this->mongodb = app('db')->getMongoDB();
}
/**
* Run the migrations.
*
* @return void
*/
public function up()
{
// The 'jenssegers/laravel-mongodb' package doesn't support Schema::rename so...
$this->mongodb->execute('db.api_keys.renameCollection("clients");');
}
/**
* Reverse the migrations.
*
* @return void
*/
public function down()
{
$this->mongodb->execute('db.clients.renameCollection("api_keys");');
}
}
|
Set mapping to standard for gamepad object.
|
export default class {
constructor (win, keys) {
this.keys = keys
this.win = win
this.id = 'keypad'
this.mapping = 'standard'
this.buttons = []
for (var key in this.keys) {
this.buttons[this.keys[key]] = {pressed: false}
}
this.onkey = function (event) {
if (event.which in this.keys) {
let pressed
if (event.type === 'keyup') {
pressed = true
} else if (event.type === 'keydown') {
pressed = false
}
this.buttons[this.keys[event.which]].pressed = pressed
this.timestamp = event.timeStamp
event.preventDefault()
}
}.bind(this)
this.connect()
}
connect () {
this.connected = true
this.win.addEventListener('keydown', this.onkey)
this.win.addEventListener('keyup', this.onkey)
}
disconnect () {
this.connected = false
this.win.removeEventListener('keydown', this.onkey)
this.win.removeEventListener('keyup', this.onkey)
}
}
|
export default class {
constructor (win, keys) {
this.keys = keys
this.win = win
this.id = 'keypad'
this.buttons = []
for (var key in this.keys) {
this.buttons[this.keys[key]] = {pressed: false}
}
this.onkey = function (event) {
if (event.which in this.keys) {
let pressed
if (event.type === 'keyup') {
pressed = true
} else if (event.type === 'keydown') {
pressed = false
}
this.buttons[this.keys[event.which]].pressed = pressed
this.timestamp = event.timeStamp
event.preventDefault()
}
}.bind(this)
this.connect()
}
connect () {
this.connected = true
this.win.addEventListener('keydown', this.onkey)
this.win.addEventListener('keyup', this.onkey)
}
disconnect () {
this.connected = false
this.win.removeEventListener('keydown', this.onkey)
this.win.removeEventListener('keyup', this.onkey)
}
}
|
Use python 2.7+ standard importlib instead of deprecated django importlib
|
from django.conf import settings
import importlib
def import_from_string(val, setting_name):
"""
Attempt to import a class from a string representation.
Based on the method of the same name in Django Rest Framework.
"""
try:
parts = val.split('.')
module_path, class_name = '.'.join(parts[:-1]), parts[-1]
module = importlib.import_module(module_path)
return getattr(module, class_name)
except ImportError as e:
raise ImportError("Could not import '{}' for CleanerVersion setting '{}'. {}: {}.".format(
(val, setting_name, e.__class__.__name__, e)))
_cache = {}
def get_versioned_delete_collector_class():
"""
Gets the class to use for deletion collection.
This is done as a method instead of just defining a module-level variable because
Django doesn't like attributes of the django.conf.settings object to be accessed
in top-level module scope.
:return: class
"""
key = 'VERSIONED_DELETE_COLLECTOR'
try:
cls = _cache[key]
except KeyError:
cls = import_from_string(getattr(settings, key, 'versions.deletion.VersionedCollector'), key)
_cache[key] = cls
return cls
|
from django.conf import settings
from django.utils import importlib
def import_from_string(val, setting_name):
"""
Attempt to import a class from a string representation.
Based on the method of the same name in Django Rest Framework.
"""
try:
parts = val.split('.')
module_path, class_name = '.'.join(parts[:-1]), parts[-1]
module = importlib.import_module(module_path)
return getattr(module, class_name)
except ImportError as e:
raise ImportError("Could not import '{}' for CleanerVersion setting '{}'. {}: {}.".format(
(val, setting_name, e.__class__.__name__, e)))
_cache = {}
def get_versioned_delete_collector_class():
"""
Gets the class to use for deletion collection.
This is done as a method instead of just defining a module-level variable because
Django doesn't like attributes of the django.conf.settings object to be accessed
in top-level module scope.
:return: class
"""
key = 'VERSIONED_DELETE_COLLECTOR'
try:
cls = _cache[key]
except KeyError:
cls = import_from_string(getattr(settings, key, 'versions.deletion.VersionedCollector'), key)
_cache[key] = cls
return cls
|
[lib] Reduce AutoReload ping to 3s.
|
<script type="text/javascript">
//
// Reload the app if server detects local change
//
(function() {
function checkForReload() {
var xhr = new XMLHttpRequest;
xhr.open('get', 'http://' + document.location.host + '/autoreload', true);
xhr.setRequestHeader('X-Requested-With','XMLHttpRequest');
xhr.onreadystatechange = function() {
if (this.readyState === 4 && /^[2]/.test(this.status)) {
var response = JSON.parse(this.responseText);
if (response.outdated) {
window.location.reload();
}
}
}
xhr.send();
}
setInterval(checkForReload, 1000 * 3);
})(window);
</script>
|
<script type="text/javascript">
//
// Reload the app if server detects local change
//
(function() {
function checkForReload() {
var xhr = new XMLHttpRequest;
xhr.open('get', 'http://' + document.location.host + '/autoreload', true);
xhr.setRequestHeader('X-Requested-With','XMLHttpRequest');
xhr.onreadystatechange = function() {
if (this.readyState === 4 && /^[2]/.test(this.status)) {
var response = JSON.parse(this.responseText);
if (response.outdated) {
window.location.reload();
}
}
}
xhr.send();
}
setInterval(checkForReload, 1000*5);
})(window);
</script>
|
Use old style string formatting to appease readthedocs aging 3.5 interpreter.
|
# pyinfra
# File: docs/conf.py
# Desc: minimal Sphinx config
from datetime import date, datetime
from better import better_theme_path
from pyinfra import __version__
_today = date.today()
copyright = '{0}, Nick Barrett'.format(datetime.strftime(_today, '%Y'))
extensions = [
# Official
'sphinx.ext.autodoc',
'sphinx.ext.napoleon',
]
source_suffix = '.rst'
master_doc = 'index'
project = 'pyinfra'
author = 'Fizzadar'
version = 'develop'
pygments_style = 'sphinx'
# Theme style override
html_title = 'pyinfra {0}'.format(__version__)
html_short_title = 'Home'
html_theme = 'better'
html_theme_path = [better_theme_path]
html_static_path = ['static']
templates_path = ['templates']
html_sidebars = {
'**': ['pyinfra_sidebar.html'],
}
html_theme_options = {
'cssfiles': ['_static/pyinfra.css'],
'scriptfiles': ['_static/sidebar.js'],
}
|
# pyinfra
# File: docs/conf.py
# Desc: minimal Sphinx config
from datetime import date, datetime
from better import better_theme_path
from pyinfra import __version__
_today = date.today()
copyright = f'{datetime.strftime(_today, "%Y")}, Nick Barrett'
extensions = [
# Official
'sphinx.ext.autodoc',
'sphinx.ext.napoleon',
]
source_suffix = '.rst'
master_doc = 'index'
project = 'pyinfra'
author = 'Fizzadar'
version = 'develop'
pygments_style = 'sphinx'
# Theme style override
html_title = 'pyinfra {0}'.format(__version__)
html_short_title = 'Home'
html_theme = 'better'
html_theme_path = [better_theme_path]
html_static_path = ['static']
templates_path = ['templates']
html_sidebars = {
'**': ['pyinfra_sidebar.html'],
}
html_theme_options = {
'cssfiles': ['_static/pyinfra.css'],
'scriptfiles': ['_static/sidebar.js'],
}
|
Add autoFocus prop to popup main <input>
- fixes it now in Chrome
- FF currently breaks this, but apparently will be fixed in FF60: https://bugzilla.mozilla.org/show_bug.cgi?id=1324255
|
import React from 'react'
import PropTypes from 'prop-types'
import styles from './Popup.css'
const Search = ({ onSearchEnter, onSearchChange, searchValue }) => (
<form className={styles.searchContainer}>
<input
autoFocus
className={styles.search}
name="query"
placeholder="Search your memory"
autoComplete="off"
onKeyDown={onSearchEnter}
onChange={onSearchChange}
value={searchValue}
/>
<i className="material-icons">search</i>
</form>
)
Search.propTypes = {
onSearchEnter: PropTypes.func.isRequired,
onSearchChange: PropTypes.func.isRequired,
searchValue: PropTypes.string.isRequired,
}
export default Search
|
import React from 'react'
import PropTypes from 'prop-types'
import styles from './Popup.css'
const Search = ({ onSearchEnter, onSearchChange, searchValue }) => (
<form className={styles.searchContainer}>
<input
className={styles.search}
name="query"
placeholder="Search your memory"
autoComplete="off"
onKeyDown={onSearchEnter}
onChange={onSearchChange}
value={searchValue}
/>
<i className="material-icons">search</i>
</form>
)
Search.propTypes = {
onSearchEnter: PropTypes.func.isRequired,
onSearchChange: PropTypes.func.isRequired,
searchValue: PropTypes.string.isRequired,
}
export default Search
|
Add one bigger size to arithmetic benchmark
|
import numpy as np
from dynd import nd, ndt
from benchrun import Benchmark, clock
class ArithmeticBenchmark(Benchmark):
parameters = ('size',)
size = [100000, 10000000, 100000000]
def run(self, size):
a = nd.uniform(dst_tp = ndt.type('{} * float64'.format(size)))
b = nd.uniform(dst_tp = ndt.type('{} * float64'.format(size)))
start = clock()
a + b
stop = clock()
return stop - start
class NumPyArithmeticBenchmark(Benchmark):
parameters = ('size',)
size = [100000, 10000000, 100000000]
def run(self, size):
a = np.random.uniform(size = size)
b = np.random.uniform(size = size)
start = clock()
a + b
stop = clock()
return stop - start
if __name__ == '__main__':
benchmark = ArithmeticBenchmark()
benchmark.print_result()
benchmark = NumPyArithmeticBenchmark()
benchmark.print_result()
|
import numpy as np
from dynd import nd, ndt
from benchrun import Benchmark, clock
class ArithemticBenchmark(Benchmark):
parameters = ('size',)
size = [100000, 10000000]
def run(self, size):
a = nd.uniform(dst_tp = ndt.type('{} * float64'.format(size)))
b = nd.uniform(dst_tp = ndt.type('{} * float64'.format(size)))
start = clock()
a + b
stop = clock()
return stop - start
class NumPyArithemticBenchmark(Benchmark):
parameters = ('size',)
size = [100000, 10000000]
def run(self, size):
a = np.random.uniform(size = size)
b = np.random.uniform(size = size)
start = clock()
a + b
stop = clock()
return stop - start
if __name__ == '__main__':
benchmark = ArithemticBenchmark()
benchmark.print_result()
benchmark = NumPyArithemticBenchmark()
benchmark.print_result()
|
Prepare for next dev cycle
|
#!/usr/bin/env python
# coding: utf-8
from setuptools import setup, find_packages
setup(
name="bentoo",
description="Benchmarking tools",
version="0.13.dev",
packages=find_packages(),
scripts=["scripts/bentoo-generator.py", "scripts/bentoo-runner.py",
"scripts/bentoo-collector.py", "scripts/bentoo-analyser.py",
"scripts/bentoo-likwid-metric.py", "scripts/bentoo-quickstart.py",
"scripts/bentoo-calltree.py", "scripts/bentoo-merge.py",
"scripts/bentoo-calltree-analyser.py"],
package_data={
'': ['*.adoc', '*.rst', '*.md']
},
author="Zhang YANG",
author_email="zyangmath@gmail.com",
license="PSF",
keywords="Benchmark;Performance Analysis",
url="http://github.com/ProgramFan/bentoo"
)
|
#!/usr/bin/env python
# coding: utf-8
from setuptools import setup, find_packages
setup(
name="bentoo",
description="Benchmarking tools",
version="0.12",
packages=find_packages(),
scripts=["scripts/bentoo-generator.py", "scripts/bentoo-runner.py",
"scripts/bentoo-collector.py", "scripts/bentoo-analyser.py",
"scripts/bentoo-likwid-metric.py", "scripts/bentoo-quickstart.py",
"scripts/bentoo-calltree.py", "scripts/bentoo-merge.py",
"scripts/bentoo-calltree-analyser.py"],
package_data={
'': ['*.adoc', '*.rst', '*.md']
},
author="Zhang YANG",
author_email="zyangmath@gmail.com",
license="PSF",
keywords="Benchmark;Performance Analysis",
url="http://github.com/ProgramFan/bentoo"
)
|
Add Semantic UI into the app layout.
|
<!DOCTYPE html>
<html lang="en" prefix="dcterms: http://purl.org/dc/terms/ og: http://ogp.me/ns#">
<head>
<meta charset="utf-8">
<meta http-equiv="x-ua-compatible" content="IE=edge">
<meta name="description" content="@yield('description')">
<meta name="author" content="@yield('author')">
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta property="dcterms:modified" content="@yield('timestamp')">
<title>@yield('title', 'v.ideogam.es')</title>
<link rel="canonical" href="@yield('canonical_url')">
<link rel="stylesheet" href="//oss.maxcdn.com/semantic-ui/2.1.8/semantic.min.css">
<link rel="stylesheet" href="{{ asset('css/app.css') }}">
</head>
<body>
@yield('content')
</body>
</html>
|
<!DOCTYPE html>
<html lang="en" prefix="dcterms: http://purl.org/dc/terms/ og: http://ogp.me/ns#">
<head>
<meta charset="utf-8">
<meta http-equiv="x-ua-compatible" content="IE=edge">
<meta name="description" content="@yield('description')">
<meta name="author" content="@yield('author')">
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta property="dcterms:modified" content="@yield('timestamp')">
<title>@yield('title')</title>
<link rel="canonical" href="@yield('canonical_url')">
</head>
<body>
@yield('content')
</body>
</html>
|
[BPK-989] Fix spinner theme attributes export
|
/*
* Backpack - Skyscanner's Design System
*
* Copyright 2017 Skyscanner Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import BpkSpinner from './src/BpkSpinner';
import SPINNER_TYPES from './src/spinnerTypes';
import BpkLargeSpinner from './src/BpkLargeSpinner';
import BpkExtraLargeSpinner from './src/BpkExtraLargeSpinner';
import themeAttributes from './src/themeAttributes';
export { BpkSpinner, BpkLargeSpinner, BpkExtraLargeSpinner, SPINNER_TYPES, themeAttributes };
|
/*
* Backpack - Skyscanner's Design System
*
* Copyright 2017 Skyscanner Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import BpkSpinner from './src/BpkSpinner';
import SPINNER_TYPES from './src/spinnerTypes';
import BpkLargeSpinner from './src/BpkLargeSpinner';
import BpkExtraLargeSpinner from './src/BpkExtraLargeSpinner';
import { themeAttributes } from './src/themeAttributes';
export { BpkSpinner, BpkLargeSpinner, BpkExtraLargeSpinner, SPINNER_TYPES, themeAttributes };
|
Allow detail to be falsy
|
const CustomEvent = ((Event) => {
if (Event) {
try {
new Event(); // eslint-disable-line no-new
} catch (e) {
return undefined;
}
}
return Event;
})(window.CustomEvent);
function createCustomEvent(name, opts = {}) {
let e;
if (Event) {
e = new Event(name, opts);
if ('detail' in opts) {
Object.defineProperty(e, 'detail', { value: opts.detail });
}
} else {
e = document.createEvent('CustomEvent');
e.initCustomEvent(name, opts.bubbles, opts.cancelable, opts.detail);
}
return e;
}
export default function (elem, name, opts = {}) {
if (opts.bubbles === undefined) {
opts.bubbles = true;
}
if (opts.cancelable === undefined) {
opts.cancelable = true;
}
if (opts.composed === undefined) {
opts.composed = true;
}
return elem.dispatchEvent(createCustomEvent(name, opts));
}
|
const CustomEvent = ((Event) => {
if (Event) {
try {
new Event(); // eslint-disable-line no-new
} catch (e) {
return undefined;
}
}
return Event;
})(window.CustomEvent);
function createCustomEvent(name, opts = {}) {
let e;
if (Event) {
e = new Event(name, opts);
if (opts.detail) {
Object.defineProperty(e, 'detail', { value: opts.detail });
}
} else {
e = document.createEvent('CustomEvent');
e.initCustomEvent(name, opts.bubbles, opts.cancelable, opts.detail);
}
return e;
}
export default function (elem, name, opts = {}) {
if (opts.bubbles === undefined) {
opts.bubbles = true;
}
if (opts.cancelable === undefined) {
opts.cancelable = true;
}
if (opts.composed === undefined) {
opts.composed = true;
}
return elem.dispatchEvent(createCustomEvent(name, opts));
}
|
Add limitation of this defence in the comment
|
import torchaudio
import librosa
# There exist a limitation of this defense that it may lead to the problem of aliasing, and we can use the narrowband sample rate
# rather than downsampling followed by upsampling.
# resampling reference https://core.ac.uk/download/pdf/228298313.pdf
# resampling input transformation defense for audio
T = torchaudio.transforms
# Read audio file
audio_data = librosa.load(files, sr=16000)[0][-19456:]
audio_data = torch.tensor(audio_data).float().to(device)
# Discarding samples from a waveform during downsampling could remove a significant portion of the adversarial perturbation, thereby prevents an adversarial attack.
# resample the audio files to 8kHz from 16kHz
sample = T.Resample(16000, 8000, resampling_method="sinc_interpolation")
audio_resample_1 = sample(audio_data)
# resample the audio back to 16kHz
sample = T.Resample(8000, 16000, resampling_method="sinc_interpolation")
# Give audio_resample_2 as input to the asr model
audio_resample_2 = sample(audio_resample_1)
|
import torchaudio
import librosa
# resampling reference https://core.ac.uk/download/pdf/228298313.pdf
# resampling input transformation defense for audio
T = torchaudio.transforms
# Read audio file
audio_data = librosa.load(files, sr=16000)[0][-19456:]
audio_data = torch.tensor(audio_data).float().to(device)
# Discarding samples from a waveform during downsampling could remove a significant portion of the adversarial perturbation, thereby prevents an adversarial attack.
# resample the audio files to 8kHz from 16kHz
sample = T.Resample(16000, 8000, resampling_method="sinc_interpolation")
audio_resample_1 = sample(audio_data)
# resample the audio back to 16kHz
sample = T.Resample(8000, 16000, resampling_method="sinc_interpolation")
# Give audio_resample_2 as input to the asr model
audio_resample_2 = sample(audio_resample_1)
|
Support for web url widget creator
|
package uk.ac.edukapp.util;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.validator.routines.UrlValidator;
public class ServletUtils {
public static String getCookieValue(Cookie[] cookies, String cookieName,
String defaultValue) {
for (int i = 0; i < cookies.length; i++) {
Cookie cookie = cookies[i];
if (cookieName.equals(cookie.getName()))
return (cookie.getValue());
}
return (defaultValue);
}
public static String getServletRootURL ( HttpServletRequest request ) {
String serverName = request.getServerName();
int serverPort = request.getServerPort();
String protocol;
if (request.isSecure()){
protocol = "https";
}
else {
protocol = "http";
}
String root = protocol+"://"+serverName+":"+serverPort+"/edukapp";
return root;
}
public static boolean isNumeric(String str) {
return str.matches("-?\\d+(.\\d+)?");
}
public static boolean checkURL ( String url ) {
String schemes[] = {"http", "https"};
UrlValidator urlValidator = new UrlValidator(schemes);
return urlValidator.isValid(url);
}
}
|
package uk.ac.edukapp.util;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
public class ServletUtils {
public static String getCookieValue(Cookie[] cookies, String cookieName,
String defaultValue) {
for (int i = 0; i < cookies.length; i++) {
Cookie cookie = cookies[i];
if (cookieName.equals(cookie.getName()))
return (cookie.getValue());
}
return (defaultValue);
}
public static String getServletRootURL ( HttpServletRequest request ) {
String serverName = request.getServerName();
int serverPort = request.getServerPort();
String protocol;
if (request.isSecure()){
protocol = "https";
}
else {
protocol = "http";
}
String root = protocol+"://"+serverName+":"+serverPort+"/edukapp";
return root;
}
public static boolean isNumeric(String str) {
return str.matches("-?\\d+(.\\d+)?");
}
}
|
Add some more output examples
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Brewer
# Copyright (c) 2015 Jack Brewer
#
# License: MIT
"""Exports the Stylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Stylint(NodeLinter):
"""Provides an interface to stylint."""
npm_name = 'stylint'
syntax = ('stylus', 'vue')
selectors = {'vue': 'source.stylus.embedded.html'}
cmd = 'stylint @ *'
executable = 'stylint'
version_requirement = '>= 1.5.0'
regex = r'''(?xi)
# Comments show example output for each line of a Stylint warning
# /path/to/file/example.styl
^.*$\s*
# 177:24 colors warning hexidecimal color should be a variable
# 177:24 warning hexidecimal color should be a variable colors
# 177 warning hexidecimal color should be a variable colors
^(?P<line>\d+):?(?P<col>\d+)?\s*(?P<rule>\w+)?\s*((?P<warning>warning)|(?P<error>error))\s*(?P<message>.+)$\s*
'''
multiline = True
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'styl'
config_file = ('--config', '.stylintrc', '~')
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Brewer
# Copyright (c) 2015 Jack Brewer
#
# License: MIT
"""Exports the Stylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Stylint(NodeLinter):
"""Provides an interface to stylint."""
npm_name = 'stylint'
syntax = ('stylus', 'vue')
selectors = {'vue': 'source.stylus.embedded.html'}
cmd = 'stylint @ *'
executable = 'stylint'
version_requirement = '>= 1.5.0'
regex = r'''(?xi)
# Comments show example output for each line of a Stylint warning
# /path/to/file/example.styl
^.*$\s*
# 177:24 colors warning hexidecimal color should be a variable
^(?P<line>\d+):?(?P<col>\d+)?\s*(?P<rule>\w+)?\s*((?P<warning>warning)|(?P<error>error))\s*(?P<message>.+)$\s*
'''
multiline = True
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'styl'
config_file = ('--config', '.stylintrc', '~')
|
Fix product patterm form type
|
<?php
namespace Furniture\ProductBundle\Form\Type\ProductPattern;
use Furniture\ProductBundle\Entity\ProductPartPatternVariantSelection;
use Furniture\ProductBundle\Entity\ProductVariantsPattern;
use Furniture\ProductBundle\Form\Type\Pattern\PatternType;
use Symfony\Component\Form\FormBuilderInterface;
use Symfony\Component\OptionsResolver\OptionsResolver;
class ProductPatternType extends PatternType
{
/**
* {@inheritDoc}
*/
public function configureOptions(OptionsResolver $resolver)
{
parent::configureOptions($resolver);
$resolver->setDefaults([
'data_class' => ProductVariantsPattern::class,
'variant_selection_class' => ProductPartPatternVariantSelection::class
]);
}
/**
* {@inheritDoc}
*/
public function buildForm(FormBuilderInterface $builder, array $options)
{
parent::buildForm($builder, $options);
$builder
->add('price', 'sylius_money', [
'label' => 'Price'
]);
}
/**
* {@inheritDoc}
*/
public function getName()
{
return 'product_pattern';
}
}
|
<?php
namespace Furniture\ProductBundle\Form\Type\ProductPattern;
use Furniture\ProductBundle\Entity\ProductPartPatternVariantSelection;
use Furniture\ProductBundle\Entity\ProductVariantsPattern;
use Furniture\ProductBundle\Form\Type\Pattern\PatternType;
use Symfony\Component\Form\FormBuilderInterface;
use Symfony\Component\OptionsResolver\OptionsResolver;
class ProductPatternType extends PatternType
{
/**
* {@inheritDoc}
*/
public function configureOptions(OptionsResolver $resolver)
{
parent::configureOptions($resolver);
$resolver->setDefaults([
'data_class' => ProductVariantsPattern::class,
'variant_selection_class' => ProductPartPatternVariantSelection::class
]);
}
/**
* {@inheritDoc}
*/
public function buildForm(FormBuilderInterface $builder, array $options)
{
parent::buildForm($builder, $options);
$builder
->add('price', 'number', [
'label' => 'Price'
]);
}
/**
* {@inheritDoc}
*/
public function getName()
{
return 'product_pattern';
}
}
|
[Form] Replace methods in ChoiceView by public properties (PHP +100ms, Twig +400ms)
|
<?php foreach ($options as $index => $choice): ?>
<?php if ($view['form']->isChoiceGroup($choice)): ?>
<optgroup label="<?php echo $view->escape($view['translator']->trans($index, array(), $translation_domain)) ?>">
<?php foreach ($choice as $nested_choice): ?>
<option value="<?php echo $view->escape($nested_choice->value) ?>"<?php if ($view['form']->isChoiceSelected($form, $nested_choice)): ?> selected="selected"<?php endif?>><?php echo $view->escape($view['translator']->trans($nested_choice->label, array(), $translation_domain)) ?></option>
<?php endforeach ?>
</optgroup>
<?php else: ?>
<option value="<?php echo $view->escape($choice->value) ?>"<?php if ($view['form']->isChoiceSelected($form, $choice)): ?> selected="selected"<?php endif?>><?php echo $view->escape($view['translator']->trans($choice->label, array(), $translation_domain)) ?></option>
<?php endif ?>
<?php endforeach ?>
|
<?php foreach ($options as $index => $choice): ?>
<?php if ($view['form']->isChoiceGroup($choice)): ?>
<optgroup label="<?php echo $view->escape($view['translator']->trans($index, array(), $translation_domain)) ?>">
<?php foreach ($choice as $nested_choice): ?>
<option value="<?php echo $view->escape($nested_choice->getValue()) ?>"<?php if ($view['form']->isChoiceSelected($form, $nested_choice)): ?> selected="selected"<?php endif?>><?php echo $view->escape($view['translator']->trans($nested_choice->getLabel(), array(), $translation_domain)) ?></option>
<?php endforeach ?>
</optgroup>
<?php else: ?>
<option value="<?php echo $view->escape($choice->getValue()) ?>"<?php if ($view['form']->isChoiceSelected($form, $choice)): ?> selected="selected"<?php endif?>><?php echo $view->escape($view['translator']->trans($choice->getLabel(), array(), $translation_domain)) ?></option>
<?php endif ?>
<?php endforeach ?>
|
logout: Clean up logout method call.
This was a silent type error -- we were passing an argument to a
function that doesn't expect any. Make the code less confused.
[greg: revised commit message]
|
/* @flow */
import React, { PureComponent } from 'react';
import { StyleSheet } from 'react-native';
import type { Actions, Auth } from '../types';
import connectWithActions from '../connectWithActions';
import { ZulipButton } from '../common';
import { unregisterPush } from '../api';
import { getAuth, getPushToken } from '../selectors';
import { logErrorRemotely } from '../utils/logging';
const styles = StyleSheet.create({
logoutButton: {
flex: 1,
margin: 8,
},
});
type Props = {
auth: Auth,
pushToken: string,
actions: Actions,
};
class LogoutButton extends PureComponent<Props> {
props: Props;
shutdownPUSH = async () => {
const { auth, actions, pushToken } = this.props;
if (pushToken !== '') {
try {
await unregisterPush(auth, pushToken);
} catch (e) {
logErrorRemotely(e, 'failed to unregister Push token');
}
actions.deleteTokenPush();
}
};
logout = () => {
const { actions } = this.props;
this.shutdownPUSH();
actions.logout();
};
render() {
return (
<ZulipButton style={styles.logoutButton} secondary text="Logout" onPress={this.logout} />
);
}
}
export default connectWithActions(state => ({
auth: getAuth(state),
pushToken: getPushToken(state),
}))(LogoutButton);
|
/* @flow */
import React, { PureComponent } from 'react';
import { StyleSheet } from 'react-native';
import type { Actions, Auth } from '../types';
import connectWithActions from '../connectWithActions';
import { ZulipButton } from '../common';
import { unregisterPush } from '../api';
import { getAuth, getAccounts, getPushToken } from '../selectors';
import { logErrorRemotely } from '../utils/logging';
const styles = StyleSheet.create({
logoutButton: {
flex: 1,
margin: 8,
},
});
type Props = {
accounts: any[],
auth: Auth,
pushToken: string,
actions: Actions,
};
class LogoutButton extends PureComponent<Props> {
props: Props;
shutdownPUSH = async () => {
const { auth, actions, pushToken } = this.props;
if (pushToken !== '') {
try {
await unregisterPush(auth, pushToken);
} catch (e) {
logErrorRemotely(e, 'failed to unregister Push token');
}
actions.deleteTokenPush();
}
};
logout = () => {
const { accounts, actions } = this.props;
this.shutdownPUSH();
actions.logout(accounts);
};
render() {
return (
<ZulipButton style={styles.logoutButton} secondary text="Logout" onPress={this.logout} />
);
}
}
export default connectWithActions(state => ({
auth: getAuth(state),
accounts: getAccounts(state),
pushToken: getPushToken(state),
}))(LogoutButton);
|
Use a more sensible output file name
|
#!/usr/bin/env node
var fs = require("fs");
var glob = require("glob");
var ts = require("typescript");
var tsa = require("../lib/Analyzer");
var o = require("../lib/JsonOutput");
var arg = process.argv.slice(2)[0];
const fileNames = glob(arg, {}, function(err, files) {
if (err) {
console.log(err);
process.exit(1);
}
var outputter = new o.Output.JsonOutput("ts-analysis-results.json");
var analyzer = new tsa.SonarTypeScript.Analyzer(outputter);
files.forEach(fileName => {
console.log("Parsing: " + fileName);
var sourceFile = ts.createSourceFile(
fileName,
fs.readFileSync(fileName).toString(),
ts.ScriptTarget.ES6,
/*setParentNodes */ true);
analyzer.analyzeFile(sourceFile);
});
outputter.close();
});
|
#!/usr/bin/env node
var fs = require("fs");
var glob = require("glob");
var ts = require("typescript");
var tsa = require("../lib/Analyzer");
var o = require("../lib/JsonOutput");
var arg = process.argv.slice(2)[0];
const fileNames = glob(arg, {}, function(err, files) {
if (err) {
console.log(err);
process.exit(1);
}
var outputter = new o.Output.JsonOutput("output.json");
var analyzer = new tsa.SonarTypeScript.Analyzer(outputter);
files.forEach(fileName => {
console.log("Parsing: " + fileName);
var sourceFile = ts.createSourceFile(
fileName,
fs.readFileSync(fileName).toString(),
ts.ScriptTarget.ES6,
/*setParentNodes */ true);
analyzer.analyzeFile(sourceFile);
});
outputter.close();
});
|
Use AssertJ instead of JUnit expected to assert thrown exception.
|
package de.mvitz.jprops.core.api;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.failBecauseExceptionWasNotThrown;
import static org.mockito.BDDMockito.given;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
@RunWith(MockitoJUnitRunner.class)
public class PropertiesInjectorTest {
@Mock
private PropertyProvider provider;
@InjectMocks
private PropertiesInjector injector;
private UnknownType unknownType;
@Test
public void shouldThrowExceptionForUnknownType() throws Exception {
given(provider.get("unknownType")).willReturn("foo");
try {
injector.injectInto(this);
failBecauseExceptionWasNotThrown(InvalidPropertiesException.class);
} catch (final InvalidPropertiesException e) {
assertThat(e).hasNoCause().hasMessage("No converter found for type: UnknownType");
assertThat(unknownType).isNull();
}
}
public interface UnknownType {
}
}
|
package de.mvitz.jprops.core.api;
import static org.mockito.BDDMockito.given;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
@RunWith(MockitoJUnitRunner.class)
public class PropertiesInjectorTest {
@Mock
private PropertyProvider provider;
@InjectMocks
private PropertiesInjector injector;
@SuppressWarnings("unused")
private UnknownType unknownType;
@Test(expected = InvalidPropertiesException.class)
public void shouldThrowExceptionForUnknownType() throws Exception {
given(provider.get("unknownType")).willReturn("foo");
injector.injectInto(this);
}
public interface UnknownType {
}
}
|
Add optional parameters for starting location
|
#!/usr/bin/env python
import sys, time
import math
class Ball:
gravity = -3 # Dots per second squared
def __init__(self, x=0, y=0):
self.r = 255
self.g = 0
self.b = 0
self.x = 0
self.y = 0
self.vx = 0
self.vy = 0
def updateValues(self, timeElapsed=1): # timeElapsed in seconds
self.x += self.vx * timeElapsed
self.y += self.vy * timeElapsed + 0.5 * Ball.gravity * timeElapsed ** 2
self.vy += Ball.gravity * timeElapsed
def bounceOnEdge():
if self.x < 0 || self.x > 16
self.vx *= -1
if self.y < 0 || self.y > 16
self.vy *= -1
def drawOnMatrix(self, ledMatrix):
ledMatrix.SetPixel(int(self.x), int(self.y), self.r, self.g, self.b)
def printValues(self):
print "x: %.2f, y: %.2f - vx: %.2f vy: %.2f" % (self.x, self.y, self.vx, self.vy)
|
#!/usr/bin/env python
import sys, time
import math
class Ball:
gravity = -3 # Dots per second squared
def __init__(self):
self.r = 255
self.g = 0
self.b = 0
self.x = 0
self.y = 0
self.vx = 0
self.vy = 0
def updateValues(self, timeElapsed=1): # timeElapsed in seconds
self.x += self.vx * timeElapsed
self.y += self.vy * timeElapsed + 0.5 * Ball.gravity * timeElapsed ** 2
self.vy += Ball.gravity * timeElapsed
def bounceOnEdge():
if self.x < 0 || self.x > 16
self.vx *= -1
if self.y < 0 || self.y > 16
self.vy *= -1
def drawOnMatrix(self, ledMatrix):
ledMatrix.SetPixel(int(self.x), int(self.y), self.r, self.g, self.b)
def printValues(self):
print "x: %.2f, y: %.2f - vx: %.2f vy: %.2f" % (self.x, self.y, self.vx, self.vy)
|
Enhancement: Use {{!}} instead of ! for help.
|
var BLTPlugin = {
init: function (client, imports) {
return {
handlers: {
'!givemeblt': function (command) {
client.act(command.channel, 'gives a juicy BLT to ' + command.nickname);
}
},
help: {
'givemeblt': [
'{{!}}givemeblt',
'Gives the requestor a juicy BLT.'
]
},
commands: ['givemeblt']
}
}
};
module.exports = BLTPlugin;
|
var BLTPlugin = {
init: function (client, imports) {
return {
handlers: {
'!givemeblt': function (command) {
client.act(command.channel, 'gives a juicy BLT to ' + command.nickname);
}
},
help: {
'givemeblt': [
'!givemeblt',
'Gives the requestor a juicy BLT.'
]
},
commands: ['givemeblt']
}
}
};
module.exports = BLTPlugin;
|
Add run command to application
|
<?php
/*
* This file is part of the PHP To 7 Aid project.
*
* (c) Giso Stallenberg <gisostallenberg@gmail.com>
*
* This source file is subject to the MIT license that is bundled
* with this source code in the file LICENSE.
*/
namespace GisoStallenberg\phpTo7aid\Console;
use GisoStallenberg\phpTo7aid\Console\Command\RunCommand;
use Symfony\Component\Console\Application as BaseApplication;
/**
* @author Giso Stallenberg <gisostallenberg@gmail.com>
*/
class Application extends BaseApplication
{
/**
* Constructor.
*/
public function __construct()
{
error_reporting(-1);
parent::__construct('PHP To 7 Aid', '0.0.0');
$this->add(new RunCommand() );
}
public function getLongVersion()
{
$version = parent::getLongVersion().' by <comment>Giso Stallenberg</comment>';
$commit = '@git-commit@';
if ('@'.'git-commit@' !== $commit) {
$version .= ' ('.substr($commit, 0, 7).')';
}
return $version;
}
}
|
<?php
/*
* This file is part of the PHP To 7 Aid project.
*
* (c) Giso Stallenberg <gisostallenberg@gmail.com>
*
* This source file is subject to the MIT license that is bundled
* with this source code in the file LICENSE.
*/
namespace GisoStallenberg\phpTo7aid\Console;
use Symfony\Component\Console\Application as BaseApplication;
/**
* @author Giso Stallenberg <gisostallenberg@gmail.com>
*/
class Application extends BaseApplication
{
/**
* Constructor.
*/
public function __construct()
{
error_reporting(-1);
parent::__construct('PHP To 7 Aid', '0.0.0');
}
public function getLongVersion()
{
$version = parent::getLongVersion().' by <comment>Giso Stallenberg</comment>';
$commit = '@git-commit@';
if ('@'.'git-commit@' !== $commit) {
$version .= ' ('.substr($commit, 0, 7).')';
}
return $version;
}
}
|
Use better name and fix undefined references
|
import cli from 'cli';
import {Gpio} from 'chip-gpio';
import sensor from 'ds18x20';
var options = cli.parse();
var interval = 2000;
var threshold = 25;
var heater = new Gpio(0, 'out');
function setHeater(on) {
console.log('Heater:', on ? 'on' : 'off');
heater.write(on ? 0 : 1);
}
function setHeaterOn() {
setHeater(true);
}
function setHeaterOff() {
setHeater(false);
}
sensor.isDriverLoaded((err, isLoaded) => {
console.log(isLoaded);
sensor.list((err, listOfDeviceIds) => {
console.log(listOfDeviceIds);
});
setInterval(() => {
sensor.getAll((err, tempObj) => {
var sum = 0;
var len = 0;
tempObj.forEach(obj => {
console.log(obj);
sum += obj;
len++;
});
var average = sum / len;
console.log(average);
setHeater(average < threshold);
});
}, interval);
});
|
import cli from 'cli';
import {Gpio} from 'chip-gpio';
import temp from 'ds18x20';
var options = cli.parse();
var interval = 2000;
var threshold = 25;
var heater = new Gpio(0, 'out');
function setHeater(on) {
console.log('Heater:', on ? 'on' : 'off');
heater.write(on ? 0 : 1);
}
function setHeaterOn() {
setHeater(true);
}
function setHeaterOff() {
setHeater(false);
}
sensor.isDriverLoaded((err, isLoaded) => {
console.log(isLoaded);
sensor.list((err, listOfDeviceIds) => {
console.log(listOfDeviceIds);
});
setInterval(() => {
sensor.getAll((err, tempObj) => {
var sum = 0;
var len = 0;
tempObj.forEach(obj => {
console.log(obj);
sum += obj;
len++;
});
var average = sum / len;
console.log(average);
setHeater(average < threshold);
});
}, interval);
});
|
Add test to make sure public facing app raises exception when it hits an error.
|
import mock
from auslib.test.web.test_client import ClientTestBase
class TestDockerflowEndpoints(ClientTestBase):
def testVersion(self):
ret = self.client.get("/__version__")
self.assertEquals(ret.data, """
{
"source":"https://github.com/mozilla/balrog",
"version":"1.0",
"commit":"abcdef123456"
}
""")
def testHeartbeat(self):
with mock.patch("auslib.global_state.dbo.rules.countRules") as cr:
ret = self.client.get("/__heartbeat__")
self.assertEqual(ret.status_code, 200)
self.assertEqual(cr.call_count, 1)
def testHeartbeatWithException(self):
with mock.patch("auslib.global_state.dbo.rules.countRules") as cr:
cr.side_effect = Exception("kabom!")
# Because there's no web server between us and the endpoint, we recieve
# the Exception directly instead of a 500 error
self.assertRaises(Exception, self.client.get, "/__heartbeat__")
self.assertEqual(cr.call_count, 1)
def testLbHeartbeat(self):
ret = self.client.get("/__lbheartbeat__")
self.assertEqual(ret.status_code, 200)
|
import mock
from auslib.test.web.test_client import ClientTestBase
class TestDockerflowEndpoints(ClientTestBase):
def testVersion(self):
ret = self.client.get("/__version__")
self.assertEquals(ret.data, """
{
"source":"https://github.com/mozilla/balrog",
"version":"1.0",
"commit":"abcdef123456"
}
""")
def testHeartbeat(self):
with mock.patch("auslib.global_state.dbo.rules.countRules") as cr:
ret = self.client.get("/__heartbeat__")
self.assertEqual(ret.status_code, 200)
self.assertEqual(cr.call_count, 1)
def testLbHeartbeat(self):
ret = self.client.get("/__lbheartbeat__")
self.assertEqual(ret.status_code, 200)
|
Make it work in Python < 2.6
|
import os
import sys
def importRelativeOrAbsolute(f):
# maybe there's a way to do this more sanely with the |imp| module...
if f.endswith(".py"):
f = f[:-3]
if f.endswith(".pyc"):
f = f[:-4]
p, f = os.path.split(f)
if p:
# Add the path part of the given filename to the import path
sys.path.append(p)
else:
# Add working directory to the import path
sys.path.append(".")
try:
module = __import__(f)
except ImportError, e:
print "Failed to import: " + f
print "From: " + __file__
print str(e)
raise
sys.path.pop()
return module
|
import os
import sys
def importRelativeOrAbsolute(f):
# maybe there's a way to do this more sanely with the |imp| module...
if f.endswith(".py"):
f = f[:-3]
if f.endswith(".pyc"):
f = f[:-4]
p, f = os.path.split(f)
if p:
# Add the path part of the given filename to the import path
sys.path.append(p)
else:
# Add working directory to the import path
sys.path.append(".")
try:
module = __import__(f)
except ImportError as e:
print "Failed to import: " + f
print "From: " + __file__
print str(e)
raise
sys.path.pop()
return module
|
Fix small bug in Scaler class
|
# Author: Mainak Jas <mainak@neuro.hut.fi>
#
# License: BSD (3-clause)
from sklearn.base import TransformerMixin
from mne.fiff import pick_types
class RtClassifier:
"""
TODO: complete docstring ...
Parameters
----------
Attributes
----------
"""
def __init__(self, estimator):
self.estimator = estimator
def fit(self, X, y):
self.estimator.fit(X, y)
return self
def predict(self, X):
result = self.estimator.predict(X)
return result
class Scaler(TransformerMixin):
def __init__(self, info):
self.info = info
def transform(self, epochs_data):
picks_list = [pick_types(self.info, meg='mag', exclude='bads'),
pick_types(self.info, eeg='True', exclude='bads'),
pick_types(self.info, meg='grad', exclude='bads')]
for pick_one in picks_list:
ch_mean = epochs_data[:, pick_one, :].mean(axis=1)[:, None, :]
epochs_data[:, pick_one, :] -= ch_mean
return epochs_data
|
# Author: Mainak Jas <mainak@neuro.hut.fi>
#
# License: BSD (3-clause)
from sklearn.base import TransformerMixin
from mne.fiff import pick_types
class RtClassifier:
"""
TODO: complete docstring ...
Parameters
----------
Attributes
----------
"""
def __init__(self, estimator):
self.estimator = estimator
def fit(self, X, y):
self.estimator.fit(X, y)
return self
def predict(self, X):
result = self.estimator.predict(X)
return result
class Scaler(TransformerMixin):
def __init__(self, info):
self.info = info
def transform(self, epochs_data):
picks_list = [pick_types(epochs_data.info, meg='mag', exclude='bads'),
pick_types(epochs_data.info, eeg='True', exclude='bads'),
pick_types(epochs_data.info, meg='grad', exclude='bads')]
for pick_one in picks_list:
ch_mean = epochs_data[:, pick_one, :].mean(axis=1)[:, None, :]
epochs_data[:, pick_one, :] -= ch_mean
return epochs_data
|
Remove reference to ForceCapitalizeMixin from migration file and update with SlugifyMixin
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-08-30 19:59
from __future__ import unicode_literals
import common.mixins
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('timetables', '0003_mealoption'),
]
operations = [
migrations.CreateModel(
name='Course',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=150, unique=True)),
],
bases=(common.mixins.SlugifyMixin, models.Model),
),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-08-30 19:59
from __future__ import unicode_literals
import common.mixins
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('timetables', '0003_mealoption'),
]
operations = [
migrations.CreateModel(
name='Course',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=150, unique=True)),
],
bases=(common.mixins.ForceCapitalizeMixin, models.Model),
),
]
|
Fix redis error => If redis client subscribed a key, it cannot run another commands.
|
import SocketIO from 'socket.io';
import { createClient } from 'redis';
import { redisUrl } from './config';
const io = SocketIO();
io.use((socket, next) => {
return next();
});
io.of('/live-chatroom')
.use((socket, next) => {
return next();
})
.on('connection', (socket) => {
const redisSubscriber = createClient(redisUrl);
let currentRoom = '';
socket.on('subscribe', (id) => {
currentRoom = `live:${id}:comments`;
socket.join(currentRoom);
redisSubscriber.subscribe(`${currentRoom}:latest`);
const redis = createClient(redisUrl);
redis.exists('live', id, (err, result) => {
if (result < 2) {
redis.lpush('live', id);
}
});
redis.quit();
socket.emit('subscribed');
});
redisSubscriber.on('message', (channel, message) => {
socket.emit('comment', message);
});
socket.on('unsubscribe', (id) => {
socket.leave(id);
socket.emit('unsubscribed');
});
socket.on('disconnect', () => {
redisSubscriber.unsubscribe(`${currentRoom}:latest`);
redisSubscriber.quit();
})
});
export default io;
|
import SocketIO from 'socket.io';
import { createClient } from 'redis';
import { redisUrl } from './config';
const io = SocketIO();
io.use((socket, next) => {
return next();
});
io.of('/live-chatroom')
.use((socket, next) => {
return next();
})
.on('connection', (socket) => {
const redis = createClient(redisUrl);
let currentRoom = '';
socket.on('subscribe', (id) => {
currentRoom = `live:${id}:comments`;
socket.join(currentRoom);
redis.subscribe(`${currentRoom}:latest`);
redis.exists('live', id, (err, result) => {
if (result < 2) {
redis.lpush('live', id);
}
});
socket.emit('subscribed');
});
redis.on('message', (channel, message) => {
socket.emit('comment', message);
});
socket.on('unsubscribe', (id) => {
socket.leave(id);
socket.emit('unsubscribed');
});
socket.on('disconnect', () => {
redis.unsubscribe(`${currentRoom}:latest`);
redis.quit();
})
});
export default io;
|
Fix redeclaration of name in use of Map.computeIfAbsent.
|
package org.babblelang.engine.impl.natives.java;
import org.babblelang.engine.impl.Callable;
import org.babblelang.engine.impl.Interpreter;
import org.babblelang.engine.impl.Namespace;
import org.babblelang.engine.impl.Scope;
import org.babblelang.parser.BabbleParser;
import java.util.HashMap;
import java.util.Map;
public class ImportFunction implements Callable<JavaPackage> {
private final Map<String, JavaPackage> packages = new HashMap<>();
public Namespace bindParameters(Interpreter interpreter, BabbleParser.CallContext callSite, Namespace parent, Parameters parameters) {
Namespace namespace = parent.enter(null);
Object first = parameters.values().iterator().next();
namespace.define("name", true).set(first);
return namespace;
}
public JavaPackage call(Interpreter interpreter, BabbleParser.CallContext callSite, Scope scope) {
String name = (String) scope.get("name").get();
return getPackage(name);
}
public JavaPackage getPackage(String name) {
return packages.computeIfAbsent(name, n -> new JavaPackage(this, n));
}
}
|
package org.babblelang.engine.impl.natives.java;
import org.babblelang.engine.impl.Callable;
import org.babblelang.engine.impl.Interpreter;
import org.babblelang.engine.impl.Namespace;
import org.babblelang.engine.impl.Scope;
import org.babblelang.parser.BabbleParser;
import java.util.HashMap;
import java.util.Map;
public class ImportFunction implements Callable<JavaPackage> {
private final Map<String, JavaPackage> packages = new HashMap<>();
public Namespace bindParameters(Interpreter interpreter, BabbleParser.CallContext callSite, Namespace parent, Parameters parameters) {
Namespace namespace = parent.enter(null);
Object first = parameters.values().iterator().next();
namespace.define("name", true).set(first);
return namespace;
}
public JavaPackage call(Interpreter interpreter, BabbleParser.CallContext callSite, Scope scope) {
String name = (String) scope.get("name").get();
return getPackage(name);
}
public JavaPackage getPackage(String name) {
JavaPackage result = packages.computeIfAbsent(name, name -> new JavaPackage(this, name));
return result;
}
}
|
Store event column in text field
|
<?php
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Database\Migrations\Migration;
class CreateNewEventsTableSchema extends Migration
{
/**
* Run the migrations.
*
* @return void
*/
public function up()
{
Schema::create('events', function (Blueprint $table) {
$table->increments('id');
$table->morphs('eventable');
$table->text('content');
$table->timestamps();
});
}
/**
* Reverse the migrations.
*
* @return void
*/
public function down()
{
Schema::drop('events');
}
}
|
<?php
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Database\Migrations\Migration;
class CreateNewEventsTableSchema extends Migration
{
/**
* Run the migrations.
*
* @return void
*/
public function up()
{
Schema::create('events', function (Blueprint $table) {
$table->increments('id');
$table->morphs('eventable');
$table->json('content');
$table->timestamps();
});
}
/**
* Reverse the migrations.
*
* @return void
*/
public function down()
{
Schema::drop('events');
}
}
|
Check if title should be updated
|
'use strict';
var React = require('react'),
withSideEffect = require('react-side-effect');
function reducePropsToState(propsList) {
var innermostProps = propsList[propsList.length - 1];
if (innermostProps) {
return innermostProps.title;
}
}
function handleStateChangeOnClient(title) {
var nextTitle = title || '';
if (nextTitle !== document.title) {
document.title = nextTitle;
}
}
var DocumentTitle = React.createClass({
displayName: 'DocumentTitle',
propTypes: {
title: React.PropTypes.string.isRequired
},
render: function render() {
if (this.props.children) {
return React.Children.only(this.props.children);
} else {
return null;
}
}
});
module.exports = withSideEffect(
reducePropsToState,
handleStateChangeOnClient
)(DocumentTitle);
|
'use strict';
var React = require('react'),
withSideEffect = require('react-side-effect');
function reducePropsToState(propsList) {
var innermostProps = propsList[propsList.length - 1];
if (innermostProps) {
return innermostProps.title;
}
}
function handleStateChangeOnClient(title) {
if (title !== document.title) {
document.title = title || '';
}
}
var DocumentTitle = React.createClass({
displayName: 'DocumentTitle',
propTypes: {
title: React.PropTypes.string.isRequired
},
render: function render() {
if (this.props.children) {
return React.Children.only(this.props.children);
} else {
return null;
}
}
});
module.exports = withSideEffect(
reducePropsToState,
handleStateChangeOnClient
)(DocumentTitle);
|
Remove version check now that TFX 0.25 is released.
PiperOrigin-RevId: 343533114
|
# Lint as: python3
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for tfx.components.transform.executor.
With the native TF2 code path being exercised.
"""
import os
import tensorflow as tf
from tfx.components.transform import executor_test
class ExecutorV2Test(executor_test.ExecutorTest):
# Should not rely on inherited _SOURCE_DATA_DIR for integration tests to work
# when TFX is installed as a non-editable package.
_SOURCE_DATA_DIR = os.path.join(
os.path.dirname(os.path.dirname(__file__)), 'testdata')
def _use_force_tf_compat_v1(self):
return False
if __name__ == '__main__':
tf.test.main()
|
# Lint as: python3
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for tfx.components.transform.executor.
With the native TF2 code path being exercised.
"""
import os
import tensorflow as tf
import tensorflow_transform as tft
from tfx.components.transform import executor_test
class ExecutorV2Test(executor_test.ExecutorTest):
# Should not rely on inherited _SOURCE_DATA_DIR for integration tests to work
# when TFX is installed as a non-editable package.
_SOURCE_DATA_DIR = os.path.join(
os.path.dirname(os.path.dirname(__file__)), 'testdata')
def _use_force_tf_compat_v1(self):
return False
if __name__ == '__main__':
# TODO(b/168641322): remove once TFT post-0.25.0 released and depended on.
if tft.__version__ > '0.25.0' and tf.version.VERSION >= '2.4':
tf.test.main()
|
Add test re: hide kwarg
|
from spec import eq_, skip, Spec, raises, ok_, trap
from invoke.run import run
from invoke.exceptions import Failure
class Run(Spec):
"""run()"""
def return_code_in_result(self):
r = run("echo 'foo'")
eq_(r.stdout, "foo\n")
eq_(r.return_code, 0)
eq_(r.exited, 0)
def nonzero_return_code_for_failures(self):
result = run("false", warn=True)
eq_(result.exited, 1)
result = run("goobypls", warn=True)
eq_(result.exited, 127)
@raises(Failure)
def fast_failures(self):
run("false")
def run_acts_as_success_boolean(self):
ok_(not run("false", warn=True))
ok_(run("true"))
def non_one_return_codes_still_act_as_False(self):
ok_(not run("goobypls", warn=True, hide=True))
def warn_kwarg_allows_continuing_past_failures(self):
eq_(run("false", warn=True).exited, 1)
@trap
def hide_kwarg_allows_hiding_output(self):
run("echo 'foo'", hide=True)
eq_(sys.stdall.getvalue(), "")
|
from spec import eq_, skip, Spec, raises, ok_
from invoke.run import run
from invoke.exceptions import Failure
class Run(Spec):
"""run()"""
def return_code_in_result(self):
r = run("echo 'foo'")
eq_(r.stdout, "foo\n")
eq_(r.return_code, 0)
eq_(r.exited, 0)
def nonzero_return_code_for_failures(self):
result = run("false", warn=True)
eq_(result.exited, 1)
result = run("goobypls", warn=True)
eq_(result.exited, 127)
@raises(Failure)
def fast_failures(self):
run("false")
def run_acts_as_success_boolean(self):
ok_(not run("false", warn=True))
ok_(run("true"))
def non_one_return_codes_still_act_as_False(self):
ok_(not run("goobypls", warn=True))
def warn_kwarg_allows_continuing_past_failures(self):
eq_(run("false", warn=True).exited, 1)
|
Remove variable declaration for service and just return the object.
|
Application.Services.factory('tags', ["mcapi",
function tags(mcapi) {
return {
tags: [],
createTag: function (tag, item_id) {
mcapi('/tags/item/%', item_id)
.success(function (tag) {
return tag;
}).post(tag);
},
removeTag: function (tag_id, item_id) {
mcapi('/tags/%/item/%', tag_id, item_id)
.success(function (tag) {
return tag;
}).delete();
}
};
}]);
|
Application.Services.factory('tags', ["mcapi",
function tags(mcapi) {
var service = {
tags: [],
createTag: function (tag, item_id) {
mcapi('/tags/item/%', item_id)
.success(function (tag) {
return tag;
}).post(tag);
},
removeTag: function (tag_id, item_id) {
mcapi('/tags/%/item/%', tag_id, item_id)
.success(function (tag) {
return tag;
}).delete();
}
};
return service;
}]);
|
Update filter args and fix name
The newer version of django-filter uses `field_name` rather than `name`
|
from rest_framework import viewsets
from django_filters import filters, filterset
from api.v09.views import ResultsSetPagination
from ..models import CandidateResult, ResultSet
from ..serializers import CandidateResultSerializer, ResultSetSerializer
class CandidateResultViewSet(viewsets.ModelViewSet):
queryset = CandidateResult.objects.select_related(
"membership__party", "membership__post", "membership__person"
).order_by("id")
serializer_class = CandidateResultSerializer
pagination_class = ResultsSetPagination
class ResultSetFilter(filterset.FilterSet):
election_id = filters.CharFilter(field_name="post_election__election__slug")
election_date = filters.DateFilter(
field_name="post_election__election__election_date"
)
class Meta:
model = ResultSet
fields = ["election_date", "election_id"]
class ResultSetViewSet(viewsets.ModelViewSet):
queryset = ResultSet.objects.select_related(
"post_election__post", "user"
).order_by("id")
serializer_class = ResultSetSerializer
pagination_class = ResultsSetPagination
filter_class = ResultSetFilter
|
from rest_framework import viewsets
from django_filters import filters, filterset
from api.v09.views import ResultsSetPagination
from ..models import CandidateResult, ResultSet
from ..serializers import CandidateResultSerializer, ResultSetSerializer
class CandidateResultViewSet(viewsets.ModelViewSet):
queryset = CandidateResult.objects.select_related(
"membership__party", "membership__post", "membership__person"
).order_by("id")
serializer_class = CandidateResultSerializer
pagination_class = ResultsSetPagination
class ProductFilter(filterset.FilterSet):
election_id = filters.CharFilter(name="post_election__election__slug")
election_date = filters.DateFilter(
name="post_election__election__election_date"
)
class Meta:
model = ResultSet
fields = ["election_date", "election_id"]
class ResultSetViewSet(viewsets.ModelViewSet):
queryset = ResultSet.objects.select_related(
"post_election__post", "user"
).order_by("id")
serializer_class = ResultSetSerializer
pagination_class = ResultsSetPagination
filter_class = ProductFilter
|
Remove the hardcode from the settings.
|
from django.conf import settings as django_settings
from django.utils.translation import ugettext_lazy as _
def settings(request):
if not getattr(django_settings, "SOCIAL", None):
return {}
return {
"SOCIAL_FACEBOOK": django_settings.SOCIAL.get("FACEBOOK", ""),
"SOCIAL_TWITTER": django_settings.SOCIAL.get("TWITTER", ""),
"SOCIAL_GITHUB_REPO": django_settings.SOCIAL.get("GITHUB_REPO", ""),
"GOOGLE_ANALYTICS_ID": django_settings.SOCIAL.get("GOOGLE_ANALYTICS_ID", ""),
"SITE_TITLE": django_settings.SITE_TITLE
}
|
from django.conf import settings as django_settings
from django.utils.translation import ugettext_lazy as _
def settings(request):
if not getattr(django_settings, "SOCIAL", None):
return {}
return {
"SOCIAL_FACEBOOK": django_settings.SOCIAL.get("FACEBOOK", ""),
"SOCIAL_TWITTER": django_settings.SOCIAL.get("TWITTER", ""),
"SOCIAL_GITHUB_REPO": django_settings.SOCIAL.get("GITHUB_REPO", ""),
"GOOGLE_ANALYTICS_ID": django_settings.SOCIAL.get("GOOGLE_ANALYTICS_ID", ""),
"SITE_TITLE": _("People's Archive of Rural India")
}
|
Add renderer setting for electron
|
'use strict';
const webpack = require('webpack');
const env = process.env.NODE_ENV || 'development';
const isDev = env === 'development';
const devtool = isDev ? '#inline-source-map' : null;
const uglify = isDev ? null : new webpack.optimize.UglifyJsPlugin({
output: {
comments: false
},
compress: {
dead_code: true, // eslint-disable-line camelcase
warnings: false
}
});
const plugins = [
new webpack.DefinePlugin({
'process.env': {
NODE_ENV: JSON.stringify(env)
}
}),
uglify
].filter(v => v);
module.exports = {
cache: true,
entry: [
'babel-polyfill',
'./renderer/index.js'
],
debug: env === 'development',
target: 'electron',
devtool,
output: {
path: `${__dirname}/dist`,
filename: 'bundle.js'
},
plugins,
module: {
loaders: [
{
test: /\.js$/,
loader: 'babel?cacheDirectory',
exclude: /node_modules/
}
]
}
};
|
'use strict';
const webpack = require('webpack');
const env = process.env.NODE_ENV || 'development';
const isDev = env === 'development';
const devtool = isDev ? '#inline-source-map' : null;
const uglify = isDev ? null : new webpack.optimize.UglifyJsPlugin({
output: {
comments: false
},
compress: {
dead_code: true, // eslint-disable-line camelcase
warnings: false
}
});
const plugins = [
new webpack.DefinePlugin({
'process.env': {
NODE_ENV: JSON.stringify(env)
}
}),
uglify
].filter(v => v);
module.exports = {
cache: true,
entry: [
'babel-polyfill',
'./renderer/index.js'
],
debug: env === 'development',
devtool,
output: {
path: `${__dirname}/dist`,
filename: 'bundle.js'
},
plugins,
module: {
loaders: [
{
test: /\.js$/,
loader: 'babel?cacheDirectory',
exclude: /node_modules/
}
]
}
};
|
Use Setext strategy in GitHub built in Writer
|
"""
File that initializes a Writer object designed for GitHub style markdown files.
"""
from anchorhub.writer import Writer
from anchorhub.builtin.github.wstrategies import MarkdownATXWriterStrategy, \
MarkdownSetextWriterStrategy, MarkdownInlineLinkWriterStrategy
import anchorhub.builtin.github.switches as ghswitches
def make_github_markdown_writer(opts):
"""
Creates a Writer object used for parsing and writing Markdown files with
a GitHub style anchor transformation
:param opts:
:return: A Writer object designed for parsing, modifying, and writing
AnchorHub tags to converted anchors in Markdown files using GitHub style
anchors
"""
assert hasattr(opts, 'wrapper_regex')
atx = MarkdownATXWriterStrategy(opts)
setext = MarkdownSetextWriterStrategy(opts)
inline = MarkdownInlineLinkWriterStrategy(opts)
code_block_switch = ghswitches.code_block_switch
strategies = [atx, setext, inline]
switches = [code_block_switch]
return Writer(strategies, switches=switches)
|
"""
File that initializes a Writer object designed for GitHub style markdown files.
"""
from anchorhub.writer import Writer
from anchorhub.builtin.github.wstrategies import MarkdownATXWriterStrategy, \
MarkdownSetextWriterStrategy, MarkdownInlineLinkWriterStrategy
import anchorhub.builtin.github.switches as ghswitches
def make_github_markdown_writer(opts):
"""
Creates a Writer object used for parsing and writing Markdown files with
a GitHub style anchor transformation
:param opts:
:return: A Writer object designed for parsing, modifying, and writing
AnchorHub tags to converted anchors in Markdown files using GitHub style
anchors
"""
assert hasattr(opts, 'wrapper_regex')
atx = MarkdownATXWriterStrategy(opts)
inline = MarkdownInlineLinkWriterStrategy(opts)
code_block_switch = ghswitches.code_block_switch
strategies = [atx, inline]
switches = [code_block_switch]
return Writer(strategies, switches=switches)
|
Allow arg to specify spawning type
|
import os
import multiprocessing
import threading
class Sponsor(object):
def __init__(self):
print('Sponsor pid: {}'.format(os.getpid()))
def create(self, behavior):
return Actor(behavior, self)
class Actor(object):
def __init__(self, behavior, sponsor):
self.behavior = behavior
self.sponsor = sponsor
def send(self, message, method='thread'):
spawn(self.behavior, message, method)
def spawn(f, args, method='thread'):
if method == 'thread':
t = threading.Thread(target=f, args=(args,))
t.start()
if method == 'process':
p = multiprocessing.Process(target=f, args=(args,))
p.start()
sponsor = Sponsor()
def stateless_beh(message):
print("Got message: {}".format(message))
stateless = sponsor.create(stateless_beh)
def stateful_beh(state):
def _f(message):
print("Have state: {}".format(state))
print("Got message: {}".format(message))
return _f
stateful = sponsor.create(stateful_beh({'key': 5}))
|
import os
import multiprocessing
import threading
class Sponsor(object):
def __init__(self):
print('Sponsor pid: {}'.format(os.getpid()))
def create(self, behavior):
return Actor(behavior, self)
class Actor(object):
def __init__(self, behavior, sponsor):
self.behavior = behavior
self.sponsor = sponsor
def send(self, message):
spawn(self.behavior, message, method='process')
def spawn(f, args, method='thread'):
if method == 'thread':
t = threading.Thread(target=f, args=(args,))
t.start()
if method == 'process':
p = multiprocessing.Process(target=f, args=(args,))
p.start()
sponsor = Sponsor()
def stateless_beh(message):
print("Got message: {}".format(message))
stateless = sponsor.create(stateless_beh)
def stateful_beh(state):
def _f(message):
print("Have state: {}".format(state))
print("Got message: {}".format(message))
return _f
stateful = sponsor.create(stateful_beh({'key': 5}))
|
AccessController: Fix spi.ac.AttmeptTarget to change types of session_id and attempt_id
|
package io.digdag.spi.ac;
import org.immutables.value.Value;
@Value.Immutable
public interface AttemptTarget
{
int getSiteId();
String getProjectName();
String getWorkflowName();
long getSessionId();
long getId();
static AttemptTarget of(int siteId, String projectName, String workflowName, long sessionId, long id)
{
return ImmutableAttemptTarget.builder()
.siteId(siteId)
.projectName(projectName)
.workflowName(workflowName)
.sessionId(sessionId)
.id(id)
.build();
}
}
|
package io.digdag.spi.ac;
import org.immutables.value.Value;
@Value.Immutable
public interface AttemptTarget
{
int getSiteId();
String getProjectName();
String getWorkflowName();
int getSessionId();
int getId();
static AttemptTarget of(int siteId, String projectName, String workflowName, int sessionId, int id)
{
return ImmutableAttemptTarget.builder()
.siteId(siteId)
.projectName(projectName)
.workflowName(workflowName)
.sessionId(sessionId)
.id(id)
.build();
}
}
|
Remove exception from method signature
|
package de.codecentric.centerdevice.glass;
import com.sun.glass.ui.Application;
import de.codecentric.centerdevice.util.ReflectionUtils;
import javafx.application.Platform;
public class MacApplicationAdapter {
private Application app;
private boolean forceQuitOnCmdQ = true;
public MacApplicationAdapter() {
app = Application.GetApplication();
}
public void hide() {
ReflectionUtils.invokeQuietly(app, "_hide");
}
public void hideOtherApplications() {
ReflectionUtils.invokeQuietly(app, "_hideOtherApplications");
}
public void unhideAllApplications() {
ReflectionUtils.invokeQuietly(app, "_unhideAllApplications");
}
public void quit() {
Application.EventHandler eh = app.getEventHandler();
if (eh != null) {
eh.handleQuitAction(Application.GetApplication(), System.nanoTime());
}
if (forceQuitOnCmdQ) {
Platform.exit();
}
}
public void setForceQuitOnCmdQ(boolean forceQuit) {
this.forceQuitOnCmdQ = forceQuit;
}
}
|
package de.codecentric.centerdevice.glass;
import com.sun.glass.ui.Application;
import de.codecentric.centerdevice.util.ReflectionUtils;
import javafx.application.Platform;
public class MacApplicationAdapter {
private Application app;
private boolean forceQuitOnCmdQ = true;
public MacApplicationAdapter() throws ReflectiveOperationException {
app = Application.GetApplication();
}
public void hide() {
ReflectionUtils.invokeQuietly(app, "_hide");
}
public void hideOtherApplications() {
ReflectionUtils.invokeQuietly(app, "_hideOtherApplications");
}
public void unhideAllApplications() {
ReflectionUtils.invokeQuietly(app, "_unhideAllApplications");
}
public void quit() {
Application.EventHandler eh = app.getEventHandler();
if (eh != null) {
eh.handleQuitAction(Application.GetApplication(), System.nanoTime());
}
if (forceQuitOnCmdQ) {
Platform.exit();
}
}
public void setForceQuitOnCmdQ(boolean forceQuit) {
this.forceQuitOnCmdQ = forceQuit;
}
}
|
Fix regexp logic when installed into subfolder
|
<?php
/*
* This file is part of Slim HTTP Basic Authentication middleware
*
* Copyright (c) 2013-2015 Mika Tuupola
*
* Licensed under the MIT license:
* http://www.opensource.org/licenses/mit-license.php
*
* Project home:
* https://github.com/tuupola/slim-basic-auth
*
*/
namespace Slim\Middleware\HttpBasicAuthentication;
class RequestPathRule implements RuleInterface
{
protected $options = array(
"path" => "/",
"passthrough" => array()
);
public function __construct($options = array())
{
$this->options = array_merge($this->options, $options);
}
public function __invoke(\Slim\Slim $app)
{
/* If request path is matches passthrough should not authenticate. */
foreach ($this->options["passthrough"] as $passthrough) {
$passthrough = rtrim($passthrough, "/");
if (!!preg_match("@^{$passthrough}(/.*)?$@", $app->request->getResourceUri())) {
return false;
}
}
/* Otherwise check if path matches and we should authenticate. */
$path = rtrim($this->options["path"], "/");
return !!preg_match("@^{$path}(/.*)?$@", $app->request->getResourceUri());
}
}
|
<?php
/*
* This file is part of Slim HTTP Basic Authentication middleware
*
* Copyright (c) 2013-2015 Mika Tuupola
*
* Licensed under the MIT license:
* http://www.opensource.org/licenses/mit-license.php
*
* Project home:
* https://github.com/tuupola/slim-basic-auth
*
*/
namespace Slim\Middleware\HttpBasicAuthentication;
class RequestPathRule implements RuleInterface
{
protected $options = array(
"path" => "/",
"passthrough" => array()
);
public function __construct($options = array())
{
$this->options = array_merge($this->options, $options);
}
public function __invoke(\Slim\Slim $app)
{
/* If request path is matches passthrough should not authenticate. */
foreach ($this->options["passthrough"] as $passthrough) {
$passthrough = rtrim($passthrough, "/");
if (!!preg_match("@{$passthrough}(/.*)?$@", $app->request->getPath())) {
return false;
}
}
/* Otherwise check if path matches and we should authenticate. */
$path = rtrim($this->options["path"], "/");
return !!preg_match("@{$path}(/.*)?$@", $app->request->getPath());
}
}
|
Use mapDispatchToProps to bind actions efficiently
|
import React, { Component, PropTypes } from 'react';
import { bindActionCreators } from 'redux';
import { connect } from 'react-redux';
import Header from '../components/Header';
import MainSection from '../components/MainSection';
import * as TodoActions from '../actions/todos';
class App extends Component {
render() {
const { todos, actions } = this.props;
return (
<div>
<Header addTodo={actions.addTodo} />
<MainSection todos={todos} actions={actions} />
</div>
);
}
}
App.propTypes = {
todos: PropTypes.array.isRequired,
actions: PropTypes.object.isRequired
};
function mapStateToProps(state) {
return {
todos: state.todos
};
}
function mapDispatchToProps(dispatch) {
return {
actions: bindActionCreators(TodoActions, dispatch)
};
}
export default connect(
mapStateToProps,
mapDispatchToProps
)(App);
|
import React, { Component, PropTypes } from 'react';
import { bindActionCreators } from 'redux';
import { connect } from 'react-redux';
import Header from '../components/Header';
import MainSection from '../components/MainSection';
import * as TodoActions from '../actions/todos';
class App extends Component {
render() {
const { todos, dispatch } = this.props;
const actions = bindActionCreators(TodoActions, dispatch);
return (
<div>
<Header addTodo={actions.addTodo} />
<MainSection todos={todos} actions={actions} />
</div>
);
}
}
App.propTypes = {
todos: PropTypes.array.isRequired,
dispatch: PropTypes.func.isRequired
};
function mapStateToProps(state) {
return {
todos: state.todos
};
}
export default connect(mapStateToProps)(App);
|
Add endpoint for flask app.
|
# -*- coding: utf-8 -*-
from electro.errors import ResourceDuplicatedDefinedError
class API(object):
def __init__(self, app=None, decorators=None,
catch_all_404s=None):
self.app = app
self.endpoints = set()
self.decorators = decorators or []
self.catch_all_404s = catch_all_404s
def add_resource(self, resource, url, **kw):
endpoint = kw.pop('endpoint', None) or resource.__name__.lower()
self.endpoints.add(endpoint)
if endpoint in self.app.view_functions:
previous_view_class = self.app.view_functions[endpoint].__dict__['view_class']
if previous_view_class != resource:
raise ResourceDuplicatedDefinedError(endpoint)
resource.endpoint = endpoint
resource_func = resource.as_view(endpoint)
for decorator in self.decorators:
resource_func = decorator(resource_func)
self.app.add_url_rule(url, view_func=resource_func, endpoint=endpoint, **kw)
|
# -*- coding: utf-8 -*-
from electro.errors import ResourceDuplicatedDefinedError
class API(object):
def __init__(self, app=None, decorators=None,
catch_all_404s=None):
self.app = app
self.endpoints = set()
self.decorators = decorators or []
self.catch_all_404s = catch_all_404s
def add_resource(self, resource, url, **kw):
endpoint = kw.pop('endpoint', None) or resource.__name__.lower()
self.endpoints.add(endpoint)
if endpoint in self.app.view_functions:
previous_view_class = self.app.view_functions[endpoint].__dict__['view_class']
if previous_view_class != resource:
raise ResourceDuplicatedDefinedError(endpoint)
resource.endpoint = endpoint
resource_func = resource.as_view(endpoint)
for decorator in self.decorators:
resource_func = decorator(resource_func)
self.app.add_url_rule(url, view_func=resource_func, **kw)
|
Add missing function expression name
|
/* eslint no-console: 0 */
'use strict';
const irc = require('irc');
const server = process.env.IRC_SERVER;
const user = process.env.IRC_USER;
const channel = process.env.IRC_CHANNEL;
const client = module.exports.client = new irc.Client(server, user, {
autoConnect: false,
autoRejoin: true,
channels: [channel],
showErrors: true,
});
client.connect(5, function clientConnectCb() {
console.log(new Date(), '[IRC]', 'Connected!');
});
if (process.env.NODE_ENV !== 'testing') {
client.on('registered', function clientOnRegisterd(message) {
console.log(new Date(), '[IRC]', message.args[1]);
});
}
client.on('error', function clientOnError(error) {
console.error(error);
console.error('Shutting Down...');
process.exit(1);
});
module.exports.notify = function ircPost(nodes, callback) {
nodes.forEach(function nodesForEach(node) {
// let name = `[${node.name}](${jenkins}/computer/${node.name})`;
if (node.offline) {
client.say(channel, `Jenkins slave ${node.name} is offline`);
} else {
client.say(channel, `Jenkins slave ${node.name} is online`);
}
});
callback(null);
};
|
/* eslint no-console: 0 */
'use strict';
const irc = require('irc');
const server = process.env.IRC_SERVER;
const user = process.env.IRC_USER;
const channel = process.env.IRC_CHANNEL;
const client = module.exports.client = new irc.Client(server, user, {
autoConnect: false,
autoRejoin: true,
channels: [channel],
showErrors: true,
});
client.connect(5, function() {
console.log(new Date(), '[IRC]', 'Connected!');
});
if (process.env.NODE_ENV !== 'testing') {
client.on('registered', function clientOnRegisterd(message) {
console.log(new Date(), '[IRC]', message.args[1]);
});
}
client.on('error', function clientOnError(error) {
console.error(error);
console.error('Shutting Down...');
process.exit(1);
});
module.exports.notify = function ircPost(nodes, callback) {
nodes.forEach(function nodesForEach(node) {
// let name = `[${node.name}](${jenkins}/computer/${node.name})`;
if (node.offline) {
client.say(channel, `Jenkins slave ${node.name} is offline`);
} else {
client.say(channel, `Jenkins slave ${node.name} is online`);
}
});
callback(null);
};
|
Fix code that gets post-processed command-line args
|
// Command aws-gen-gocli parses a JSON description of an AWS API and generates a
// Go file containing a client for the API.
//
// aws-gen-gocli EC2 apis/ec2/2014-10-01.api.json service/ec2/ec2.go
package main
import (
"flag"
"fmt"
"os"
"github.com/awslabs/aws-sdk-go/model"
)
func main() {
var svcPath string
var forceService bool
flag.StringVar(&svcPath, "path", "service", "generate in a specific directory (default: 'service')")
flag.BoolVar(&forceService, "force", false, "force re-generation of PACKAGE/service.go")
flag.Parse()
api := flag.Arg(0)
in, err := os.Open(api)
if err != nil {
panic(err)
}
defer in.Close()
if err := model.Load(in); err != nil {
panic(err)
}
if err := model.Generate(svcPath, forceService); err != nil {
fmt.Fprintf(os.Stderr, "error generating %s\n", api)
panic(err)
}
}
|
// Command aws-gen-gocli parses a JSON description of an AWS API and generates a
// Go file containing a client for the API.
//
// aws-gen-gocli EC2 apis/ec2/2014-10-01.api.json service/ec2/ec2.go
package main
import (
"flag"
"fmt"
"os"
"github.com/awslabs/aws-sdk-go/model"
)
func main() {
var svcPath string
var forceService bool
flag.StringVar(&svcPath, "path", "service", "generate in a specific directory (default: 'service')")
flag.BoolVar(&forceService, "force", false, "force re-generation of PACKAGE/service.go")
flag.Parse()
api := os.Args[len(os.Args)-flag.NArg()]
in, err := os.Open(api)
if err != nil {
panic(err)
}
defer in.Close()
if err := model.Load(in); err != nil {
panic(err)
}
if err := model.Generate(svcPath, forceService); err != nil {
fmt.Fprintf(os.Stderr, "error generating %s\n", os.Args[1])
panic(err)
}
}
|
:bug: Fix a bug in the template tag.
|
from django.template.loader_tags import register
from django.template import loader, Context, defaultfilters, TemplateDoesNotExist
import markdown
presenters = {
'Speaker': 'presenters/speaker_presenter.html'
}
generic_template = 'presenters/object_presenter.html'
@register.simple_tag(takes_context=True)
def present(context, obj):
model_name = type(obj).__name__
template_name = presenters.get(model_name, generic_template)
t = loader.get_template(template_name)
return t.render(Context({
'model_name': model_name,
'obj': obj,
}))
@register.filter
def noval(data, placeholder):
if data:
return data
return placeholder
@register.simple_tag(takes_context=True)
def include_md(context, template_name):
lang = context['LANGUAGE_CODE'].replace('-', '_')
try:
t = loader.render_to_string('markdown/{}/{}'.format(lang, template_name), context)
except TemplateDoesNotExist:
t = loader.render_to_string('markdown/en/{}'.format(template_name), context)
html = markdown.markdown(t)
return defaultfilters.safe(html)
|
from django.template.loader_tags import register
from django.template import loader, Context, defaultfilters, TemplateDoesNotExist
import markdown
presenters = {
'Speaker': 'presenters/speaker_presenter.html'
}
generic_template = 'presenters/object_presenter.html'
@register.simple_tag(takes_context=True)
def present(context, obj):
model_name = type(obj).__name__
template_name = presenters.get(model_name, generic_template)
t = loader.get_template(template_name)
return t.render(Context({
'model_name': model_name,
'obj': obj,
}))
@register.filter
def noval(data, placeholder):
if data:
return data
return placeholder
@register.simple_tag(takes_context=True)
def include_md(context, template_name):
lang = context['LANGUAGE_CODE'].replace('-', '_')
try:
t = loader.render_to_string('markdown/{}/{}'.format(lang, template_name), context)
except TemplateDoesNotExist:
t = loader.render_to_string('markdown/en_US/{}'.format(template_name), context)
html = markdown.markdown(t)
return defaultfilters.safe(html)
|
Set __version__ to 6.1.4 in preparation for the v6.1.4 release
That is all.
Change-Id: I79edd9574995e50c17c346075bf158e6f1d64a0c
Reviewed-on: https://gerrit.franz.com:9080/6845
Reviewed-by: Tadeusz Sznuk <4402abb98f9559cbfb6d73029f928227b498069b@franz.com>
Reviewed-by: Ahmon Dancy <8f7d8ce2c6797410ae95fecd4c30801ee9f760ac@franz.com>
Tested-by: Ahmon Dancy <8f7d8ce2c6797410ae95fecd4c30801ee9f760ac@franz.com>
|
# The version number must follow these rules:
# - When the server is released, a client with exactly the same version number
# should be released.
# - Bugfixes should be released as consecutive post-releases,
# that is versions of the form X.Y.Z.postN, where X.Y.Z is
# the AG version number and N increases with each fix.
# - Code from the development branch may be released any time
# with a version of the form X.Y.ZrcN (rc = release candidate).
#
# When this file is committed to git the version should look like this:
# - In any branch that has already been released: X.Y.Z
# AG and Python client versions should be the same.
# - In a 'stable' branch: X.Y.ZpostN, where X.Y.Z is the PREVIOUS
# version of AG.
# - In the development branch: X.Y.ZpreN.
#
# The full range of valid version numbers is described here:
# https://www.python.org/dev/peps/pep-0440/
__version__ = u'6.1.4'
|
# The version number must follow these rules:
# - When the server is released, a client with exactly the same version number
# should be released.
# - Bugfixes should be released as consecutive post-releases,
# that is versions of the form X.Y.Z.postN, where X.Y.Z is
# the AG version number and N increases with each fix.
# - Code from the development branch may be released any time
# with a version of the form X.Y.ZrcN (rc = release candidate).
#
# When this file is committed to git the version should look like this:
# - In any branch that has already been released: X.Y.Z
# AG and Python client versions should be the same.
# - In a 'stable' branch: X.Y.ZpostN, where X.Y.Z is the PREVIOUS
# version of AG.
# - In the development branch: X.Y.ZpreN.
#
# The full range of valid version numbers is described here:
# https://www.python.org/dev/peps/pep-0440/
__version__ = u'6.1.3.post1'
|
Add function to change string to upper camel case
|
define(function (require, exports, module) {
'use strict';
function CodeWriter(indentString) {
this.lines = [];
this.indentString = (indentString ? indentString : ' ');
this.indentations = [];
}
CodeWriter.prototype.indent = function () {
this.indentations.push(this.indentString);
};
CodeWriter.prototype.outdent = function () {
this.indentations.splice(this.indentations.length - 1, 1);
};
CodeWriter.prototype.writeLine = function (line) {
if (line) {
this.lines.push(this.indentations.join('') + line);
} else {
this.lines.push('');
}
};
CodeWriter.prototype.getData = function () {
return this.lines.join('\n');
};
CodeWriter.prototype.fileName = function (className) {
return className.replace(/([a-z])([A-Z])/g, '$1_$2').toLowerCase();
};
CodeWriter.prototype.toCamelCase = function (className) {
return className.replace(/(\b|_)\w/g, function (match) {
return match.replace(/_/, '').toUpperCase();
});
};
exports.CodeWriter = CodeWriter;
});
|
define(function (require, exports, module) {
'use strict';
function CodeWriter(indentString) {
this.lines = [];
this.indentString = (indentString ? indentString : ' ');
this.indentations = [];
}
CodeWriter.prototype.indent = function () {
this.indentations.push(this.indentString);
};
CodeWriter.prototype.outdent = function () {
this.indentations.splice(this.indentations.length - 1, 1);
};
CodeWriter.prototype.writeLine = function (line) {
if (line) {
this.lines.push(this.indentations.join('') + line);
} else {
this.lines.push('');
}
};
CodeWriter.prototype.getData = function () {
return this.lines.join('\n');
};
CodeWriter.prototype.fileName = function (className) {
return className.replace(/([a-z])([A-Z])/g, '$1_$2').toLowerCase();
};
exports.CodeWriter = CodeWriter;
});
|
Fix validation of Client objects
|
import uuid
from django.db import transaction, IntegrityError
from mygpo.users.settings import STORE_UA
from mygpo.users.models import Client
import logging
logger = logging.getLogger(__name__)
def get_device(user, uid, user_agent, undelete=True):
"""
Loads or creates the device indicated by user, uid.
If the device has been deleted and undelete=True, it is undeleted.
"""
store_ua = user.profile.settings.get_wksetting(STORE_UA)
# list of fields to update -- empty list = no update
update_fields = []
try:
with transaction.atomic():
client = Client(id=uuid.uuid1(), user=user, uid=uid)
client.clean_fields()
client.clean()
client.save()
except IntegrityError:
client = Client.objects.get(user=user, uid=uid)
if client.deleted and undelete:
client.deleted = False
update_fields.append('deleted')
if store_ua and user_agent and client.user_agent != user_agent:
client.user_agent = user_agent
update_fields.append('user_agent')
if update_fields:
client.save(update_fields=update_fields)
return client
|
import uuid
from django.db import transaction, IntegrityError
from mygpo.users.settings import STORE_UA
from mygpo.users.models import Client
import logging
logger = logging.getLogger(__name__)
def get_device(user, uid, user_agent, undelete=True):
"""
Loads or creates the device indicated by user, uid.
If the device has been deleted and undelete=True, it is undeleted.
"""
store_ua = user.profile.settings.get_wksetting(STORE_UA)
# list of fields to update -- empty list = no update
update_fields = []
try:
with transaction.atomic():
client = Client(id=uuid.uuid1(), user=user, uid=uid)
client.full_clean()
client.save()
except IntegrityError:
client = Client.objects.get(user=user, uid=uid)
if client.deleted and undelete:
client.deleted = False
update_fields.append('deleted')
if store_ua and user_agent and client.user_agent != user_agent:
client.user_agent = user_agent
update_fields.append('user_agent')
if update_fields:
client.save(update_fields=update_fields)
return client
|
Allow to give already initialized redis client instead of the port/host
|
const duration = require('@maxdome/duration');
const Throttle = require('redis-throttle');
module.exports = config => {
if (config.redis) {
Throttle.rdb = config.redis;
} else {
Throttle.configure(config);
}
return (key, limit, callback, fallback) => {
let span = '1 second';
if (typeof limit === 'string') {
[limit, span] = limit.split(' per ');
}
span = duration(span).milliseconds();
const throttle = new Throttle(key, { span, accuracy: span / 10 });
return new Promise((resolve, reject) => {
throttle.read((err, count) => {
if (err) {
reject(err);
return;
}
if (count <= limit) {
throttle.increment(1, err => {
if (err) {
reject(err);
return;
}
resolve(callback());
});
} else {
if (fallback) {
resolve(fallback());
} else {
resolve();
}
}
});
});
};
};
|
const duration = require('@maxdome/duration');
const Throttle = require('redis-throttle');
module.exports = config => {
Throttle.configure(config);
return (key, limit, callback, fallback) => {
let span = '1 second';
if (typeof limit === 'string') {
[limit, span] = limit.split(' per ');
}
span = duration(span).milliseconds();
const throttle = new Throttle(key, { span, accuracy: span / 10 });
return new Promise((resolve, reject) => {
throttle.read((err, count) => {
if (err) {
reject(err);
return;
}
if (count <= limit) {
throttle.increment(1, err => {
if (err) {
reject(err);
return;
}
resolve(callback());
});
} else {
if (fallback) {
resolve(fallback());
} else {
resolve();
}
}
});
});
};
};
|
Make sure we use the absolute path in generating cron lines
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
This tool iterates through all of the scrapers in the datasets module
and creates a cron-task for each one.
'''
import os
import pkgutil
import random
import sys
from publish.lib.manifest import get_scraper_names
def get_launch_binary():
this_location = sys.argv[0]
path = os.path.join(os.path.dirname(this_location), "run_scraper")
return os.path.abspath(path)
def get_dmswitch_binary():
this_location = sys.argv[0]
path = os.path.join(os.path.dirname(this_location), "dmswitch")
return os.path.abspath(path)
def get_random_hours_and_minutes():
random.seed()
return random.choice(xrange(0, 23)), random.choice(xrange(0, 60, 5))
def main():
binary = get_launch_binary()
dmswitch = get_dmswitch_binary()
for scraper in get_scraper_names():
cli = "{cmd} {scraper} && {dms} --switch {scraper}"\
.format(cmd=binary, scraper=scraper, dms=dmswitch)
hour, minute = get_random_hours_and_minutes()
crontime = "{} {} * * * {}".format(minute, hour, cli)
print crontime
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
This tool iterates through all of the scrapers in the datasets module
and creates a cron-task for each one.
'''
import os
import pkgutil
import random
import sys
from publish.lib.manifest import get_scraper_names
def get_launch_binary():
this_location = sys.argv[0]
return os.path.join(os.path.dirname(this_location), "run_scraper")
def get_dmswitch_binary():
this_location = sys.argv[0]
return os.path.join(os.path.dirname(this_location), "dmswitch")
def get_random_hours_and_minutes():
random.seed()
return random.choice(xrange(0, 23)), random.choice(xrange(0, 60, 5))
def main():
binary = get_launch_binary()
dmswitch = get_dmswitch_binary()
for scraper in get_scraper_names():
cli = "{cmd} {scraper} && {dms} --switch {scraper}"\
.format(cmd=binary, scraper=scraper, dms=dmswitch)
hour, minute = get_random_hours_and_minutes()
crontime = "{} {} * * * {}".format(minute, hour, cli)
print crontime
|
Rename commentthreadwriter to commentthreadworker since it does not write
anything...
|
from setuptools import setup, find_packages
setup(
name='zeit.care',
version='0.2dev',
author='Christian Zagrodnick, Ron Drongowski, Dominik Hoppe',
author_email='cz@gocept.com',
url='http://trac.gocept.com/zeit',
description="""\
""",
packages=find_packages('src'),
package_dir = {'': 'src'},
include_package_data = True,
zip_safe=False,
license='gocept proprietary',
namespace_packages = ['zeit'],
install_requires=[
'zeit.connector',
'setuptools',
'pytz',
],
entry_points = """
[console_scripts]
isofication = zeit.care.worker:isofy_main
divisor = zeit.care.divisor:main
boxinjector = zeit.care.boxinjector:main
ressortindexwriter = zeit.care.ressortindex:main
commentthreadworker = zeit.care.commentthread:main
"""
)
|
from setuptools import setup, find_packages
setup(
name='zeit.care',
version='0.2dev',
author='Christian Zagrodnick, Ron Drongowski, Dominik Hoppe',
author_email='cz@gocept.com',
url='http://trac.gocept.com/zeit',
description="""\
""",
packages=find_packages('src'),
package_dir = {'': 'src'},
include_package_data = True,
zip_safe=False,
license='gocept proprietary',
namespace_packages = ['zeit'],
install_requires=[
'zeit.connector',
'setuptools',
'pytz',
],
entry_points = """
[console_scripts]
isofication = zeit.care.worker:isofy_main
divisor = zeit.care.divisor:main
boxinjector = zeit.care.boxinjector:main
ressortindexwriter = zeit.care.ressortindex:main
commentthreadwriter = zeit.care.commentthread:main
"""
)
|
Remove depreciated createJSModules @overide marker
|
package io.rado.backgroundcolor;
import com.facebook.react.ReactPackage;
import com.facebook.react.bridge.JavaScriptModule;
import com.facebook.react.bridge.NativeModule;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.uimanager.ViewManager;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
public class BackgroundColorPackage implements ReactPackage {
public List<Class<? extends JavaScriptModule>> createJSModules() {
return Collections.emptyList();
}
@Override
public List<ViewManager> createViewManagers(ReactApplicationContext reactContext) {
return Collections.emptyList();
}
@Override
public List<NativeModule> createNativeModules(
ReactApplicationContext reactContext) {
List<NativeModule> modules = new ArrayList<>();
modules.add(new BackgroundColorModule(reactContext));
return modules;
}
}
|
package io.rado.backgroundcolor;
import com.facebook.react.ReactPackage;
import com.facebook.react.bridge.JavaScriptModule;
import com.facebook.react.bridge.NativeModule;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.uimanager.ViewManager;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
public class BackgroundColorPackage implements ReactPackage {
@Override
public List<Class<? extends JavaScriptModule>> createJSModules() {
return Collections.emptyList();
}
@Override
public List<ViewManager> createViewManagers(ReactApplicationContext reactContext) {
return Collections.emptyList();
}
@Override
public List<NativeModule> createNativeModules(
ReactApplicationContext reactContext) {
List<NativeModule> modules = new ArrayList<>();
modules.add(new BackgroundColorModule(reactContext));
return modules;
}
}
|
Update outdated link to repository, per @cknv
|
import os
import sys
from distutils.core import setup
if sys.version_info < (3,):
print('\nSorry, but Adventure can only be installed under Python 3.\n')
sys.exit(1)
README_PATH = os.path.join(os.path.dirname(__file__), 'adventure', 'README.txt')
with open(README_PATH, encoding="utf-8") as f:
README_TEXT = f.read()
setup(
name='adventure',
version='1.4',
description='Colossal Cave adventure game at the Python prompt',
long_description=README_TEXT,
author='Brandon Craig Rhodes',
author_email='brandon@rhodesmill.org',
url='https://github.com/brandon-rhodes/python-adventure',
packages=['adventure', 'adventure/tests'],
package_data={'adventure': ['README.txt', '*.dat', 'tests/*.txt']},
classifiers=[
'Development Status :: 6 - Mature',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Topic :: Games/Entertainment',
],
)
|
import os
import sys
from distutils.core import setup
if sys.version_info < (3,):
print('\nSorry, but Adventure can only be installed under Python 3.\n')
sys.exit(1)
README_PATH = os.path.join(os.path.dirname(__file__), 'adventure', 'README.txt')
with open(README_PATH, encoding="utf-8") as f:
README_TEXT = f.read()
setup(
name='adventure',
version='1.4',
description='Colossal Cave adventure game at the Python prompt',
long_description=README_TEXT,
author='Brandon Craig Rhodes',
author_email='brandon@rhodesmill.org',
url='https://bitbucket.org/brandon/adventure/overview',
packages=['adventure', 'adventure/tests'],
package_data={'adventure': ['README.txt', '*.dat', 'tests/*.txt']},
classifiers=[
'Development Status :: 6 - Mature',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Topic :: Games/Entertainment',
],
)
|
Fix wrong type mapping for game player stats
|
package com.faforever.api.dto;
import com.faforever.api.elide.ElideEntity;
import com.github.jasminb.jsonapi.annotations.Id;
import com.github.jasminb.jsonapi.annotations.Relationship;
import com.github.jasminb.jsonapi.annotations.Type;
import lombok.Builder;
import lombok.Data;
import lombok.EqualsAndHashCode;
import org.jetbrains.annotations.Nullable;
import java.time.OffsetDateTime;
@Data
@EqualsAndHashCode(onlyExplicitlyIncluded = true)
@Builder
@Type(GamePlayerStats.TYPE)
public class GamePlayerStats implements ElideEntity {
public static final String TYPE = "gamePlayerStats";
@Id
@EqualsAndHashCode.Include
private String id;
private boolean ai;
private Faction faction;
private byte color;
private byte team;
private byte startSpot;
private Float beforeMean;
private Float beforeDeviation;
private Float afterMean;
private Float afterDeviation;
private byte score;
@Nullable
private OffsetDateTime scoreTime;
@Relationship("game")
private Game game;
@Relationship("player")
private Player player;
}
|
package com.faforever.api.dto;
import com.faforever.api.elide.ElideEntity;
import com.github.jasminb.jsonapi.annotations.Id;
import com.github.jasminb.jsonapi.annotations.Relationship;
import com.github.jasminb.jsonapi.annotations.Type;
import lombok.Builder;
import lombok.Data;
import lombok.EqualsAndHashCode;
import org.jetbrains.annotations.Nullable;
import java.time.OffsetDateTime;
@Data
@EqualsAndHashCode(onlyExplicitlyIncluded = true)
@Builder
@Type(GamePlayerStats.TYPE)
public class GamePlayerStats implements ElideEntity {
public static final String TYPE = "game";
@Id
@EqualsAndHashCode.Include
private String id;
private boolean ai;
private Faction faction;
private byte color;
private byte team;
private byte startSpot;
private Float beforeMean;
private Float beforeDeviation;
private Float afterMean;
private Float afterDeviation;
private byte score;
@Nullable
private OffsetDateTime scoreTime;
@Relationship("game")
private Game game;
@Relationship("player")
private Player player;
}
|
Fix for default date format issue.
SVN-Revision: 402
|
package edu.wustl.bulkoperator.processor;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import org.apache.commons.beanutils.Converter;
import edu.wustl.bulkoperator.metadata.DateValue;
import edu.wustl.common.util.global.ApplicationProperties;
import edu.wustl.common.util.logger.Logger;
public class CustomDateConverter implements Converter {
private final static String DEFAULT_FORMAT = ApplicationProperties.getValue("bulk.date.valid.format.withtime");
private static final Logger logger = Logger.getCommonLogger(CustomDateConverter.class);
public Object convert(Class type, Object value)
{
SimpleDateFormat format = null;
String dateValue=null;
Date date=null;
if (value instanceof DateValue) {
format = new SimpleDateFormat(((DateValue) value).getFormat());
format.setLenient(false);
dateValue = ((DateValue) value).getValue();
} else {
format = new SimpleDateFormat(DEFAULT_FORMAT);
dateValue=value.toString();
}
try {
date=format.parse(dateValue);
} catch (ParseException e) {
logger.error("Error while parsing date.", e);
}
return date;
}
}
|
package edu.wustl.bulkoperator.processor;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import org.apache.commons.beanutils.Converter;
import edu.wustl.bulkoperator.metadata.DateValue;
import edu.wustl.common.util.global.ApplicationProperties;
import edu.wustl.common.util.logger.Logger;
public class CustomDateConverter implements Converter {
private final static String DEFAULT_FORMAT = ApplicationProperties.getValue("bulk.date.valid.format.withtime");
private static final Logger logger = Logger.getCommonLogger(CustomDateConverter.class);
public Object convert(Class type, Object value)
{
SimpleDateFormat format = null;
String dateValue=null;
Date date=null;
if (value instanceof DateValue) {
format = new SimpleDateFormat(((DateValue) value).getFormat());
dateValue = ((DateValue) value).getValue();
} else {
format = new SimpleDateFormat(DEFAULT_FORMAT);
dateValue=value.toString();
}
try {
date=format.parse(dateValue);
} catch (ParseException e) {
logger.error("Error while parsing date.", e);
}
return date;
}
}
|
Fix typo and execute JS script found in local folder.
|
import RPi.GPIO as GPIO
import time
import os
from optparse import OptionParser
# Parse input arguments
parser = OptionParser()
parser.add_option("-t", "--testGPIO", action="store_true", help="Test GPIO connection, does not call the JS script.")
# The option --pin sets the Input Pin for your Button
# It default to GPIO24 or HardwarePin 19
parser.add_option("-p", "--pin", dest="pin", help="GPIO pin to use. If not provided it defaults to HardwarePin 19.", default=19)
(options, args) = parser.parse_args()
testingGPIO = options.testGPIO != None
buttonPin = options.pin
#sets GPIO Mode to use Hardware Pin Layout
GPIO.setmode(GPIO.BCM)
#sets GPIO Pin to INPUT mode with a Pull Down Resistor
GPIO.setup(buttonPin,GPIO.IN, pull_up_down=GPIO.PUD_DOWN)
if(testingGPIO):
print "Press the connected button. If you are pressing but you do not see any further output then....there is something wrong with the connection."
while True:
#waits for Pin Input and then exectures the script below
if (GPIO.input(buttonPin)):
if (testingGPIO):
print "PIN " + buttonPin + " works correctly."
continue
#the script that will be executed (as root)
os.system("node index.js")
|
import RPi.GPIO as GPIO
import time
import os
from optparse import OptionParser
# Parse input arguments
parser = OptionParser()
parser.add_option("-t", "--testGPIO", action="store_true", help="Test GPIO connection, does not call the JS script.")
# The option --pin sets the Input Pin for your Button
# It default to GPIO24 or HardwarePin 19
parser.add_option("-p", "--pin", dest="pin", help="GPIO pin to use. If not provided it defaults to HardwarePin 19.", default=19)
(options, args) = parser.parse_args()
testingGPIO = options.testGPIO != None
buttonPin = options.pin
#sets GPIO Mode to use Hardware Pin Layout
GPIO.setmode(GPIO.BCM)
#sets GPIO Pin to INPUT mode with a Pull Down Resistor
GPIO.setup(buttonPin,GPIO.IN, pull_up_down=GPIO.PUD_DOWN)
if(testingGPIO):
print "Press the connected button. If you are pressing but you do not see any further output then....there is something wrong with the connection."
while True:
#waits for Pin Input and then exectures the script below
if (GPIO.input(buttonPin)):
if (testingGPIO):
print "PIN " + buttonPing + " works correctly."
continue
#the script that will be executed (as root)
os.system("node /home/pi/guest-password-printer/index.js")
|
Create special folder for tests
Former-commit-id: ae51c6c248baaf996f899e77e760aba1f0c161cf [formerly 7bf4f38602dd6b24f7888d88f18ca6e9cac9b9a3] [formerly 0ef00a42149f4e7724fde45c2a252e9ec7c550f5 [formerly d20e40ca85e6a06d0aba698624e3433e7c5c612a]]
Former-commit-id: 69b97c02f08943de02a1ae58c8291dcae6c56db8 [formerly d229497f2b2159c144939054ad21f57ce380c47e]
Former-commit-id: 3951a8763debafee5f9ddf812fd022c9b5ce2d10
Former-commit-id: 4a1c502ee3d367a78eae12a893d4164e999c1e45
|
package main
import (
// "fmt"
"os"
"strings"
"testing"
)
func TestGeneral(t *testing.T) {
pathToData = "testdata"
os.MkdirAll(pathToData, 0755)
defer os.RemoveAll(pathToData)
p := Open("testpage")
err := p.Update("**bold**")
if err != nil {
t.Error(err)
}
if strings.TrimSpace(p.RenderedPage) != "<p><strong>bold</strong></p>" {
t.Errorf("Did not render: '%s'", p.RenderedPage)
}
err = p.Update("**bold** and *italic*")
if err != nil {
t.Error(err)
}
p.Save()
p2 := Open("testpage")
if strings.TrimSpace(p2.RenderedPage) != "<p><strong>bold</strong> and <em>italic</em></p>" {
t.Errorf("Did not render: '%s'", p2.RenderedPage)
}
}
|
package main
import (
// "fmt"
"os"
"strings"
"testing"
)
func TestGeneral(t *testing.T) {
defer os.RemoveAll("data")
p := Open("testpage")
err := p.Update("**bold**")
if err != nil {
t.Error(err)
}
if strings.TrimSpace(p.RenderedPage) != "<p><strong>bold</strong></p>" {
t.Errorf("Did not render: '%s'", p.RenderedPage)
}
err = p.Update("**bold** and *italic*")
if err != nil {
t.Error(err)
}
p.Save()
p2 := Open("testpage")
if strings.TrimSpace(p2.RenderedPage) != "<p><strong>bold</strong> and <em>italic</em></p>" {
t.Errorf("Did not render: '%s'", p2.RenderedPage)
}
}
|
Set clear flag on render pass to false
The clear flag needs to be false to allow transparent render passes on
top of eachother. It was originally set to false in nin, but updating
three.js resets it to true, since that's the default in the official
three.js version.
This commit is a copy of 7ec1acca3327e4de25a68541d70b9a477209a927, but
needs to be re-applied because the official lib from three.js sets clear
to true.
|
/**
* @author alteredq / http://alteredqualia.com/
*/
THREE.RenderPass = function ( scene, camera, overrideMaterial, clearColor, clearAlpha ) {
THREE.Pass.call( this );
this.scene = scene;
this.camera = camera;
this.overrideMaterial = overrideMaterial;
this.clearColor = clearColor;
this.clearAlpha = ( clearAlpha !== undefined ) ? clearAlpha : 0;
this.clear = false;
this.needsSwap = false;
};
THREE.RenderPass.prototype = Object.assign( Object.create( THREE.Pass.prototype ), {
constructor: THREE.RenderPass,
render: function ( renderer, writeBuffer, readBuffer, delta, maskActive ) {
var oldAutoClear = renderer.autoClear;
renderer.autoClear = false;
this.scene.overrideMaterial = this.overrideMaterial;
var oldClearColor, oldClearAlpha;
if ( this.clearColor ) {
oldClearColor = renderer.getClearColor().getHex();
oldClearAlpha = renderer.getClearAlpha();
renderer.setClearColor( this.clearColor, this.clearAlpha );
}
renderer.render( this.scene, this.camera, this.renderToScreen ? null : readBuffer, this.clear );
if ( this.clearColor ) {
renderer.setClearColor( oldClearColor, oldClearAlpha );
}
this.scene.overrideMaterial = null;
renderer.autoClear = oldAutoClear;
}
} );
|
/**
* @author alteredq / http://alteredqualia.com/
*/
THREE.RenderPass = function ( scene, camera, overrideMaterial, clearColor, clearAlpha ) {
THREE.Pass.call( this );
this.scene = scene;
this.camera = camera;
this.overrideMaterial = overrideMaterial;
this.clearColor = clearColor;
this.clearAlpha = ( clearAlpha !== undefined ) ? clearAlpha : 0;
this.clear = true;
this.needsSwap = false;
};
THREE.RenderPass.prototype = Object.assign( Object.create( THREE.Pass.prototype ), {
constructor: THREE.RenderPass,
render: function ( renderer, writeBuffer, readBuffer, delta, maskActive ) {
var oldAutoClear = renderer.autoClear;
renderer.autoClear = false;
this.scene.overrideMaterial = this.overrideMaterial;
var oldClearColor, oldClearAlpha;
if ( this.clearColor ) {
oldClearColor = renderer.getClearColor().getHex();
oldClearAlpha = renderer.getClearAlpha();
renderer.setClearColor( this.clearColor, this.clearAlpha );
}
renderer.render( this.scene, this.camera, this.renderToScreen ? null : readBuffer, this.clear );
if ( this.clearColor ) {
renderer.setClearColor( oldClearColor, oldClearAlpha );
}
this.scene.overrideMaterial = null;
renderer.autoClear = oldAutoClear;
}
} );
|
Change a test name to CamelCase instead of snake, to conform with TFX convention.
PiperOrigin-RevId: 246340014
|
# Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for tfx.utils.dsl_utils."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Standard Imports
import tensorflow as tf
from tfx.utils import dsl_utils
class DslUtilsTest(tf.test.TestCase):
def testCsvInput(self):
[csv] = dsl_utils.csv_input(uri='path')
self.assertEqual('ExternalPath', csv.type_name)
self.assertEqual('path', csv.uri)
if __name__ == '__main__':
tf.test.main()
|
# Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for tfx.utils.dsl_utils."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Standard Imports
import tensorflow as tf
from tfx.utils import dsl_utils
class DslUtilsTest(tf.test.TestCase):
def csv_input(self):
[csv] = dsl_utils.csv_input(uri='path')
self.assertEqual('ExternalPath', csv.type_name)
self.assertEqual('path', csv.uri)
if __name__ == '__main__':
tf.test.main()
|
Migrate profile tests to pytest
|
from unittest.mock import MagicMock, patch
from buffpy.models.profile import Profile, PATHS
mocked_response = {
"name": "me",
"service": "twiter",
"id": 1
}
def test_profile_schedules_getter():
""" Should retrieve profiles from buffer's API. """
mocked_api = MagicMock()
mocked_api.get.return_value = "123"
profile = Profile(mocked_api, mocked_response)
assert profile.schedules == "123"
mocked_api.get.assert_called_once_with(url=PATHS["GET_SCHEDULES"].format("1"))
def test_profile_schedules_setter():
""" Should update profile's schedules. """
mocked_api = MagicMock()
mocked_api.get.return_value = "123"
profile = Profile(mocked_api, mocked_response)
profile.schedules = {
"times": ["mo"]
}
mocked_api.post.assert_called_once_with(
url=PATHS["UPDATE_SCHEDULES"].format("1"),
data="schedules[0][times][]=mo&")
def test_profile_updates():
""" Should properly call buffer's updates. """
mocked_api = MagicMock()
with patch("buffpy.models.profile.Updates") as mocked_updates:
profile = Profile(api=mocked_api, raw_response={"id": 1})
assert profile.updates
mocked_updates.assert_called_once_with(api=mocked_api, profile_id=1)
|
from nose.tools import eq_
from mock import MagicMock, patch
from buffpy.models.profile import Profile, PATHS
mocked_response = {
'name': 'me',
'service': 'twiter',
'id': 1
}
def test_profile_schedules_getter():
'''
Test schedules gettering from buffer api
'''
mocked_api = MagicMock()
mocked_api.get.return_value = '123'
profile = Profile(mocked_api, mocked_response)
eq_(profile.schedules, '123')
mocked_api.get.assert_called_once_with(url = PATHS['GET_SCHEDULES'] % 1)
def test_profile_schedules_setter():
'''
Test schedules setter from buffer api
'''
mocked_api = MagicMock()
mocked_api.get.return_value = '123'
profile = Profile(mocked_api, mocked_response)
profile.schedules = {
'times': ['mo']
}
mocked_api.post.assert_called_once_with(url=PATHS['UPDATE_SCHEDULES'] % 1,
data='schedules[0][times][]=mo&')
def test_profile_updates():
'''
Test updates relationship with a profile
'''
mocked_api = MagicMock()
with patch('buffpy.models.profile.Updates') as mocked_updates:
profile = Profile(api=mocked_api, raw_response={'id': 1})
updates = profile.updates
mocked_updates.assert_called_once_with(api=mocked_api, profile_id=1)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.