text
stringlengths 16
4.96k
| positive
stringlengths 321
2.24k
| negative
stringlengths 310
2.21k
|
|---|---|---|
Adjust name and description for consistency
http://jira.diamond.ac.uk/browse/DATAACQTEAM-240
|
package org.eclipse.scanning.points;
import java.util.Iterator;
import org.eclipse.scanning.api.points.AbstractGenerator;
import org.eclipse.scanning.api.points.GeneratorException;
import org.eclipse.scanning.api.points.IPosition;
import org.eclipse.scanning.api.points.models.ArrayModel;
public class ArrayGenerator extends AbstractGenerator<ArrayModel, IPosition> {
ArrayGenerator() {
setLabel("Array Scan");
setDescription("Creates a scan from an array of positions");
}
@Override
public int size() throws GeneratorException {
if (containers!=null) throw new GeneratorException("Cannot deal with regions in an array scan!");
if (model.getPositions() == null) {
return 0;
}
return model.getPositions().length;
}
@Override
public Iterator<IPosition> iterator() {
return new ArrayIterator(this);
}
}
|
package org.eclipse.scanning.points;
import java.util.Iterator;
import org.eclipse.scanning.api.points.AbstractGenerator;
import org.eclipse.scanning.api.points.GeneratorException;
import org.eclipse.scanning.api.points.IPosition;
import org.eclipse.scanning.api.points.models.ArrayModel;
public class ArrayGenerator extends AbstractGenerator<ArrayModel, IPosition> {
ArrayGenerator() {
setLabel("Position List Scan");
setDescription("Creates a scan from a list of positions");
}
@Override
public int size() throws GeneratorException {
if (containers!=null) throw new GeneratorException("Cannot deal with regions in a position list scan!");
if (model.getPositions() == null) {
return 0;
}
return model.getPositions().length;
}
@Override
public Iterator<IPosition> iterator() {
return new ArrayIterator(this);
}
}
|
Fix message panel close button bug
|
const $ = require('jquery');
export default function( msg ) {
$('.message-panel .message-content').html( msg );
var $panel = $('.message-panel');
var $textbox = $('#textbox');
$panel.removeClass('hidden');
let stickyWatch = setInterval(function(){
if ( $textbox.offset().top < 0 ) {
$panel.css('margin-left', $panel.css('margin-left') );
$panel.addClass('stuck');
} else {
$panel.removeClass('stuck');
}
},50);
$('.close-message-panel').click(function(){
close();
});
function close() {
$('.message-panel').addClass('hidden');
clearInterval(stickyWatch);
}
}
|
const $ = require('jquery');
export default function( msg ) {
$('.message-panel .message-content').html( msg );
var $panel = $('.message-panel');
var $textbox = $('#textbox');
$panel.removeClass('hidden');
let stickyWatch = setInterval(function(){
if ( $textbox.offset().top < 0 ) {
$panel.css('margin-left', $panel.css('margin-left') );
$panel.addClass('stuck');
} else {
$panel.removeClass('stuck');
}
},50);
$('.close-message-panel').click(function(){
oT.message.close();
});
function close() {
$('.message-panel').addClass('hidden');
clearInterval(stickyWatch);
}
}
|
Exclude /actuator/health from the authenticated resources
|
package com.rbmhtechnology.apidocserver.security;
import static org.springframework.boot.test.context.SpringBootTest.WebEnvironment.RANDOM_PORT;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.redirectedUrl;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
@RunWith(SpringRunner.class)
@SpringBootTest(webEnvironment = RANDOM_PORT)
@AutoConfigureMockMvc
@ActiveProfiles("openid")
public class OpenIdConnectSecurityConfiguredShould {
@Autowired
private MockMvc mockMvc;
@Test
public void forward_to_OIDC() throws Exception {
mockMvc.perform(get("/"))
.andExpect(status().isFound())
.andExpect(redirectedUrl("http://localhost/openid_connect_login"));
}
@Test
public void healthcheck_remains_unauthenticated() throws Exception {
mockMvc.perform(get("/actuator/health"))
.andExpect(status().isOk());
}
}
|
package com.rbmhtechnology.apidocserver.security;
import static org.springframework.boot.test.context.SpringBootTest.WebEnvironment.RANDOM_PORT;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.redirectedUrl;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
@RunWith(SpringRunner.class)
@SpringBootTest(webEnvironment = RANDOM_PORT)
@AutoConfigureMockMvc
@ActiveProfiles("openid")
public class OpenIdConnectSecurityConfiguredShould {
@Autowired
private MockMvc mockMvc;
@Test
public void forward_to_OIDC() throws Exception {
mockMvc.perform(get("/"))
.andExpect(status().isFound())
.andExpect(redirectedUrl("http://localhost/openid_connect_login"));
}
@Test
public void healthcheck_remains_unauthenticated() throws Exception {
mockMvc.perform(get("/health"))
.andExpect(status().isOk());
}
}
|
Handle new sublime syntax: bash
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by NotSqrt
# Copyright (c) 2013 NotSqrt
#
# License: MIT
#
"""
This module exports the Shellcheck plugin class.
Example output with --format gcc
-:230:7: warning: Quote this to prevent word splitting. [SC2046]
-:230:7: note: Useless echo? Instead of 'echo $(cmd)', just use 'cmd'. [SC2005]
-:230:158: note: Double quote to prevent globbing and word splitting. [SC2086]
-:234:10: error: Add double quotes around ${VAR[@]}, otherwise it's just like $* and breaks on spaces. [SC2068]
"""
from SublimeLinter.lint import Linter
class Shellcheck(Linter):
"""Provides an interface to shellcheck."""
syntax = ('shell-unix-generic', 'bash')
cmd = 'shellcheck --format gcc -'
regex = (
r'^.+?:(?P<line>\d+):(?P<col>\d+): '
r'(?:(?P<error>error)|(?P<warning>(warning|note))): '
r'(?P<message>.+)$'
)
defaults = {
'--exclude=,': ''
}
inline_overrides = 'exclude'
comment_re = r'\s*#'
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by NotSqrt
# Copyright (c) 2013 NotSqrt
#
# License: MIT
#
"""
This module exports the Shellcheck plugin class.
Example output with --format gcc
-:230:7: warning: Quote this to prevent word splitting. [SC2046]
-:230:7: note: Useless echo? Instead of 'echo $(cmd)', just use 'cmd'. [SC2005]
-:230:158: note: Double quote to prevent globbing and word splitting. [SC2086]
-:234:10: error: Add double quotes around ${VAR[@]}, otherwise it's just like $* and breaks on spaces. [SC2068]
"""
from SublimeLinter.lint import Linter
class Shellcheck(Linter):
"""Provides an interface to shellcheck."""
syntax = 'shell-unix-generic'
cmd = 'shellcheck --format gcc -'
regex = (
r'^.+?:(?P<line>\d+):(?P<col>\d+): '
r'(?:(?P<error>error)|(?P<warning>(warning|note))): '
r'(?P<message>.+)$'
)
defaults = {
'--exclude=,': ''
}
inline_overrides = 'exclude'
comment_re = r'\s*#'
|
lib/helpers: Use flask.flash instead of tg.flash
|
# -*- coding: utf-8 -*-
"""WebHelpers used in SkyLines."""
from __future__ import absolute_import
import datetime
import simplejson as json
from urllib import urlencode
from flask import flash
from webhelpers import date, feedgenerator, html, number, misc, text
from .string import *
from .country import *
from skylines.lib.formatter.numbers import *
from skylines.lib.formatter.datetime import *
from skylines.lib.formatter.units import *
from skylines.lib.markdown import markdown
# The dict implementation of Jinja2 only works for keyword parameters,
# but not for merging to dictionaries. We export the builtin Python dict()
# function here to get around that problem for building URLs.
dict = dict
# Jinja2 doesn't seem to have min/max... strange!
min = min
max = max
def url(base_url='/', params={}):
if not isinstance(base_url, basestring) and hasattr(base_url, '__iter__'):
base_url = '/'.join(base_url)
if params:
return '?'.join((base_url, urlencode(params)))
return base_url
|
# -*- coding: utf-8 -*-
"""WebHelpers used in SkyLines."""
from __future__ import absolute_import
import datetime
import simplejson as json
from urllib import urlencode
from tg import flash
from webhelpers import date, feedgenerator, html, number, misc, text
from .string import *
from .country import *
from skylines.lib.formatter.numbers import *
from skylines.lib.formatter.datetime import *
from skylines.lib.formatter.units import *
from skylines.lib.markdown import markdown
# The dict implementation of Jinja2 only works for keyword parameters,
# but not for merging to dictionaries. We export the builtin Python dict()
# function here to get around that problem for building URLs.
dict = dict
# Jinja2 doesn't seem to have min/max... strange!
min = min
max = max
def url(base_url='/', params={}):
if not isinstance(base_url, basestring) and hasattr(base_url, '__iter__'):
base_url = '/'.join(base_url)
if params:
return '?'.join((base_url, urlencode(params)))
return base_url
|
Increment minor version to 2.15 to prepare for a new release.
-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=159890586
|
/**
* Copyright 2015 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.devtools.cdbg.debuglets.java;
/**
* Defines the version of the Java Cloud Debugger agent.
*/
public final class GcpDebugletVersion {
/**
* Major version of the debugger.
* All agents of the same major version are compatible with each other. In other words an
* application can mix different agents with the same major version within the same debuggee.
*/
public static final int MAJOR_VERSION = 2;
/**
* Minor version of the agent.
*/
public static final int MINOR_VERSION = 15;
/**
* Debugger agent version string in the format of MAJOR.MINOR.
*/
public static final String VERSION = String.format("%d.%d", MAJOR_VERSION, MINOR_VERSION);
/**
* Main function to print the version string.
*/
public static void main(String[] args) {
System.out.println(VERSION);
}
}
|
/**
* Copyright 2015 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.devtools.cdbg.debuglets.java;
/**
* Defines the version of the Java Cloud Debugger agent.
*/
public final class GcpDebugletVersion {
/**
* Major version of the debugger.
* All agents of the same major version are compatible with each other. In other words an
* application can mix different agents with the same major version within the same debuggee.
*/
public static final int MAJOR_VERSION = 2;
/**
* Minor version of the agent.
*/
public static final int MINOR_VERSION = 14;
/**
* Debugger agent version string in the format of MAJOR.MINOR.
*/
public static final String VERSION = String.format("%d.%d", MAJOR_VERSION, MINOR_VERSION);
/**
* Main function to print the version string.
*/
public static void main(String[] args) {
System.out.println(VERSION);
}
}
|
Load MapQuest tiles over TLS.
|
$(function () {
try {
var map = L.map('map');
}
catch(e){
return true;
}
var lat = $('.map-view').data("lat").toString().split(","),
long = $('.map-view').data("long").toString().split(",");
L.tileLayer('https://otile{s}-s.mqcdn.com/tiles/1.0.0/map/{z}/{x}/{y}.jpg', {
subdomains: '1234',
maxZoom: 7,
attribution: '© <a href="http://osm.org/copyright">OpenStreetMap</a> contributors. '
+ 'Tiles Courtesy of <a href="http://www.mapquest.com/" target="_blank">MapQuest</a>'
+ '<img src="https://developer.mapquest.com/sites/default/files/mapquest/osm/mq_logo.png">'
}).addTo(map);
for (var i = 0; i < lat.length; i++) {
map.setView([lat[i], long[i]], 6);
L.marker([lat[i], long[i]]).addTo(map);
}
});
|
$(function () {
try {
var map = L.map('map');
}
catch(e){
return true;
}
var lat = $('.map-view').data("lat").toString().split(","),
long = $('.map-view').data("long").toString().split(",");
L.tileLayer('http://otile{s}.mqcdn.com/tiles/1.0.0/map/{z}/{x}/{y}.jpg', {
subdomains: '1234',
maxZoom: 7,
attribution: '© <a href="http://osm.org/copyright">OpenStreetMap</a> contributors. '
+ 'Tiles Courtesy of <a href="http://www.mapquest.com/" target="_blank">MapQuest</a>'
+ '<img src="https://developer.mapquest.com/sites/default/files/mapquest/osm/mq_logo.png">'
}).addTo(map);
for (var i = 0; i < lat.length; i++) {
map.setView([lat[i], long[i]], 6);
L.marker([lat[i], long[i]]).addTo(map);
}
});
|
Bring back commented lines of code.
|
var Model;
module("Ember.RecordArray", {
setup: function() {
Model = Ember.Model.extend({
id: Ember.attr(),
name: Ember.attr()
});
Model.adapter = Ember.FixtureAdapter.create();
Model.FIXTURES = [
{id: 1, name: 'Erik'},
{id: 2, name: 'Stefan'},
{id: 3, name: 'Kris'}
];
},
teardown: function() { }
});
test("when called with findMany, should contain an array of the IDs contained in the RecordArray", function() {
var records = Ember.run(Model, Model.find, [1,2,3]);
// test("must be created with a modelClass property", function() {
// throws(function() {
// Ember.RecordArray.create();
// }, /RecordArrays must be created with a modelClass/);
// });
deepEqual(records.get('_ids'), [1,2,3]);
equal(records.get('length'), 0);
ok(!records.get('isLoaded'));
stop();
Ember.run(records, records.then, function() {
start();
equal(records.get('length'), 3);
});
});
|
var Model;
module("Ember.RecordArray", {
setup: function() {
Model = Ember.Model.extend({
id: Ember.attr(),
name: Ember.attr()
});
Model.adapter = Ember.FixtureAdapter.create();
Model.FIXTURES = [
{id: 1, name: 'Erik'},
{id: 2, name: 'Stefan'},
{id: 3, name: 'Kris'}
];
},
teardown: function() { }
});
test("when called with findMany, should contain an array of the IDs contained in the RecordArray", function() {
var records = Ember.run(Model, Model.find, [1,2,3]);
deepEqual(records.get('_ids'), [1,2,3]);
equal(records.get('length'), 0);
ok(!records.get('isLoaded'));
stop();
Ember.run(records, records.then, function() {
start();
equal(records.get('length'), 3);
});
});
|
Change 'language' to 'syntax', that is more precise terminology.
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Aparajita Fishman
# Copyright (c) 2013 Aparajita Fishman
#
# Project: https://github.com/SublimeLinter/SublimeLinter-contrib-pep257
# License: MIT
#
"""This module exports the PEP257 plugin linter class."""
import os
from SublimeLinter.lint import highlight, PythonLinter
class PEP257(PythonLinter):
"""Provides an interface to the pep257 python module/script."""
syntax = 'python'
cmd = ('pep257@python', '-')
regex = r'^.+?:(?P<line>\d+):(?P<col>\d+): (?P<message>.+)'
default_type = highlight.WARNING
line_col_base = (1, 0) # pep257 uses one-based line and zero-based column numbers
module = 'pep257'
def check(self, code, filename):
"""Run pep257 on code and return the output."""
return self.module.check_source(code, os.path.basename(filename))
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Aparajita Fishman
# Copyright (c) 2013 Aparajita Fishman
#
# Project: https://github.com/SublimeLinter/SublimeLinter-contrib-pep257
# License: MIT
#
"""This module exports the PEP257 plugin linter class."""
import os
from SublimeLinter.lint import highlight, PythonLinter
class PEP257(PythonLinter):
"""Provides an interface to the pep257 python module/script."""
language = 'python'
cmd = ('pep257@python', '-')
regex = r'^.+?:(?P<line>\d+):(?P<col>\d+): (?P<message>.+)'
default_type = highlight.WARNING
line_col_base = (1, 0) # pep257 uses one-based line and zero-based column numbers
module = 'pep257'
def check(self, code, filename):
"""Run pep257 on code and return the output."""
return self.module.check_source(code, os.path.basename(filename))
|
Use list of values and not subquery (less efficient but do not use limit)
|
from django.core.management.base import BaseCommand, CommandError
from api import models
from django.db.models import Count, Q
class Command(BaseCommand):
can_import_settings = True
def handle(self, *args, **options):
if 'NR' in args:
print 'Delete activities of N/R cards'
activities = models.Activity.objects.filter(Q(ownedcard__card__rarity='R') | Q(ownedcard__card__rarity='N'))
count = activities.count()
activities.delete()
print ' Deleted %d activities.' % (count)
print 'Delete activities > 50 per user'
accounts = models.Account.objects.all()
for account in accounts:
to_keep = models.Activity.objects.filter(account=account).order_by('-creation')[:50]
to_delete = models.Activity.objects.filter(account=account).exclude(pk__in=to_keep.values('pk'))
count = to_delete.count()
if count > 0:
to_delete.delete()
print ' %s Deleted %d activities.' % (account, count)
|
from django.core.management.base import BaseCommand, CommandError
from api import models
from django.db.models import Count, Q
class Command(BaseCommand):
can_import_settings = True
def handle(self, *args, **options):
if 'NR' in args:
print 'Delete activities of N/R cards'
activities = models.Activity.objects.filter(Q(ownedcard__card__rarity='R') | Q(ownedcard__card__rarity='N'))
count = activities.count()
activities.delete()
print ' Deleted %d activities.' % (count)
print 'Delete activities > 50 per user'
accounts = models.Account.objects.all()
for account in accounts:
to_keep = models.Activity.objects.filter(account=account).order_by('-creation')[:50]
to_delete = models.Activity.objects.filter(account=account).exclude(pk__in=to_keep)
count = to_delete.count()
if count > 0:
to_delete.delete()
print ' %s Deleted %d activities.' % (account, count)
|
Fix issue where data was not being cleared upon logout
|
const initialState = {
profile: { data: null },
name: { data: null },
image: { data: null },
barcode: { data: null }
}
function studentProfileReducer(state = initialState, action) {
const newState = { ...state }
switch (action.type) {
case 'SET_STUDENT_PROFILE':
newState.profile.data = action.profile
return newState
case 'SET_STUDENT_NAME':
newState.name.data = action.name
return newState
case 'SET_STUDENT_PHOTO':
newState.image.data = action.image
return newState
case 'SET_STUDENT_BARCODE':
newState.barcode.data = action.barcode
return newState
case 'CLEAR_STUDENT_PROFILE_DATA':
return {
profile: { data: null },
name: { data: null },
image: { data: null },
barcode: { data: null }
}
default:
return state
}
}
module.exports = studentProfileReducer
|
const initialState = {
profile: { data: null },
name: { data: null },
image: { data: null },
barcode: { data: null }
}
function studentProfileReducer(state = initialState, action) {
const newState = { ...state }
switch (action.type) {
case 'SET_STUDENT_PROFILE':
newState.profile.data = action.profile
return newState
case 'SET_STUDENT_NAME':
newState.name.data = action.name
return newState
case 'SET_STUDENT_PHOTO':
newState.image.data = action.image
return newState
case 'SET_STUDENT_BARCODE':
newState.barcode.data = action.barcode
return newState
case 'CLEAR_STUDENT_PROFILE_DATA':
return initialState
default:
return state
}
}
module.exports = studentProfileReducer
|
fix: Add release tag in sentry
|
const { ApolloServer } = require('apollo-server-micro')
const { schema } = require('./schema')
const { createErrorFormatter, sentryIgnoreErrors } = require('./errors')
module.exports = function createHandler(options) {
let Sentry
if (options.sentryDsn) {
Sentry = require('@sentry/node')
Sentry.init({
dsn: options.sentryDsn,
release: `graphql-rss-parser@${options.version}`,
environment: process.env.NODE_ENV,
ignoreErrors: sentryIgnoreErrors,
onFatalError(error) {
console.error(error, error.response)
},
debug: process.env.DEBUG_SENTRY == 'true',
})
}
const formatError = createErrorFormatter(Sentry)
const apolloServer = new ApolloServer({ schema, formatError })
return apolloServer.createHandler({ path: '/' })
}
|
const { ApolloServer } = require('apollo-server-micro')
const { schema } = require('./schema')
const { createErrorFormatter, sentryIgnoreErrors } = require('./errors')
module.exports = function createHandler(options) {
let Sentry
if (options.sentryDsn) {
Sentry = require('@sentry/node')
Sentry.init({
dsn: options.sentryDsn,
release: options.release,
environment: process.env.NODE_ENV,
ignoreErrors: sentryIgnoreErrors,
onFatalError(error) {
console.error(error, error.response)
},
debug: process.env.DEBUG_SENTRY == 'true',
})
}
const formatError = createErrorFormatter(Sentry)
const apolloServer = new ApolloServer({ schema, formatError })
return apolloServer.createHandler({ path: '/' })
}
|
Use the OAuthLibCore object defined at the module level.
|
from django.contrib.auth import get_user_model
from .oauth2_backends import get_oauthlib_core
UserModel = get_user_model()
OAuthLibCore = get_oauthlib_core()
class OAuth2Backend(object):
"""
Authenticate against an OAuth2 access token
"""
def authenticate(self, **credentials):
request = credentials.get('request')
if request is not None:
valid, r = OAuthLibCore.verify_request(request, scopes=[])
if valid:
return r.user
return None
def get_user(self, user_id):
try:
return UserModel.objects.get(pk=user_id)
except UserModel.DoesNotExist:
return None
|
from django.contrib.auth import get_user_model
from .oauth2_backends import get_oauthlib_core
UserModel = get_user_model()
OAuthLibCore = get_oauthlib_core()
class OAuth2Backend(object):
"""
Authenticate against an OAuth2 access token
"""
def authenticate(self, **credentials):
request = credentials.get('request')
if request is not None:
oauthlib_core = get_oauthlib_core()
valid, r = oauthlib_core.verify_request(request, scopes=[])
if valid:
return r.user
return None
def get_user(self, user_id):
try:
return UserModel.objects.get(pk=user_id)
except UserModel.DoesNotExist:
return None
|
Fix Racket on FreeBSD after openat changes
@quantum5 this feels iffy, but I think it's (marginally) better than allowing all .racket folders to be read
|
from dmoj.executors.base_executor import CompiledExecutor
from dmoj.executors.mixins import ScriptDirectoryMixin
import os
class Executor(ScriptDirectoryMixin, CompiledExecutor):
ext = '.rkt'
name = 'RKT'
fs = ['/etc/nsswitch.conf$', '/etc/passwd$', os.path.expanduser('~/\.racket/.*?')]
command = 'racket'
syscalls = ['epoll_create', 'epoll_wait', 'poll', 'select']
address_grace = 131072
test_program = '''\
#lang racket
(displayln (read-line))
'''
def get_compile_args(self):
return [self.runtime_dict['raco'], 'make', self._code]
def get_cmdline(self):
return [self.get_command(), self._code]
def get_executable(self):
return self.get_command()
@classmethod
def initialize(cls, sandbox=True):
if 'raco' not in cls.runtime_dict:
return False
return super(Executor, cls).initialize(sandbox)
@classmethod
def get_versionable_commands(cls):
return [('racket', cls.get_command())]
@classmethod
def get_find_first_mapping(cls):
return {
'racket': ['racket'],
'raco': ['raco']
}
|
from dmoj.executors.base_executor import CompiledExecutor
from dmoj.executors.mixins import ScriptDirectoryMixin
class Executor(ScriptDirectoryMixin, CompiledExecutor):
ext = '.rkt'
name = 'RKT'
fs = ['/etc/nsswitch.conf$', '/etc/passwd$']
command = 'racket'
syscalls = ['epoll_create', 'epoll_wait', 'poll', 'select']
address_grace = 131072
test_program = '''\
#lang racket
(displayln (read-line))
'''
def get_compile_args(self):
return [self.runtime_dict['raco'], 'make', self._code]
def get_cmdline(self):
return [self.get_command(), self._code]
def get_executable(self):
return self.get_command()
@classmethod
def initialize(cls, sandbox=True):
if 'raco' not in cls.runtime_dict:
return False
return super(Executor, cls).initialize(sandbox)
@classmethod
def get_versionable_commands(cls):
return [('racket', cls.get_command())]
@classmethod
def get_find_first_mapping(cls):
return {
'racket': ['racket'],
'raco': ['raco']
}
|
Add mutation detection in dev by default
|
import { createStore, compose, applyMiddleware, combineReducers } from 'redux'
import { composeWithDevTools } from 'redux-devtools-extension'
import thunk from 'redux-thunk'
import immutableStateInvariant from 'redux-immutable-state-invariant'
import isPlainObject from './isPlainObject'
const IS_PRODUCTION = process.env.NODE_ENV === 'production'
export function getDefaultMiddleware(isProduction = IS_PRODUCTION) {
const middlewareArray = [thunk]
if (!isProduction) {
middlewareArray.unshift(immutableStateInvariant())
}
return middlewareArray
}
export function configureStore(options = {}) {
const {
reducer,
middleware = getDefaultMiddleware(),
devTools = true,
preloadedState,
enhancers = []
} = options
let rootReducer
if (typeof reducer === 'function') {
rootReducer = reducer
} else if (isPlainObject(reducer)) {
rootReducer = combineReducers(reducer)
} else {
throw new Error(
'Reducer argument must be a function or an object of functions that can be passed to combineReducers'
)
}
const middlewareEnhancer = applyMiddleware(...middleware)
const storeEnhancers = [middlewareEnhancer, ...enhancers]
let finalCompose = devTools ? composeWithDevTools : compose
const composedEnhancer = finalCompose(...storeEnhancers)
const store = createStore(rootReducer, preloadedState, composedEnhancer)
return store
}
|
import { createStore, compose, applyMiddleware, combineReducers } from 'redux'
import { composeWithDevTools } from 'redux-devtools-extension'
import thunk from 'redux-thunk'
import isPlainObject from './isPlainObject'
export function getDefaultMiddleware() {
return [thunk]
}
export function configureStore(options = {}) {
const {
reducer,
middleware = getDefaultMiddleware(),
devTools = true,
preloadedState,
enhancers = []
} = options
let rootReducer
if (typeof reducer === 'function') {
rootReducer = reducer
} else if (isPlainObject(reducer)) {
rootReducer = combineReducers(reducer)
} else {
throw new Error(
'Reducer argument must be a function or an object of functions that can be passed to combineReducers'
)
}
const middlewareEnhancer = applyMiddleware(...middleware)
const storeEnhancers = [middlewareEnhancer, ...enhancers]
let finalCompose = devTools ? composeWithDevTools : compose
const composedEnhancer = finalCompose(...storeEnhancers)
const store = createStore(rootReducer, preloadedState, composedEnhancer)
return store
}
|
Refactor some book model variables
Add an edition field, help texts to some of the fields.
Change behaviour of the __str__ method to be more informative.
|
from django.db import models
from datetime import datetime
from django.utils import timezone
# Create your models here.
class Book(models.Model):
title = models.CharField(max_length=200)
edition = models.SmallIntegerField(default=1)
author = models.CharField(max_length=200)
year = models.DateTimeField('year published',
help_text="Please use the following format: <em>YYYY-MM-DD</em>.",
blank=True)
pages = models.IntegerField(default=0)
isbn_10 = models.IntegerField(default=0,help_text="Do not include dashes")
isbn_13 = models.IntegerField(default=0,help_text="Do not include dashes")
description = models.TextField()
cover_image = models.ImageField('cover Image',
upload_to='cover_pics/%Y-%m-%d/',
null=True,
blank=True)
date_added = models.DateTimeField(default=datetime.now)
def __str__(self):
if self.edition==1:
nth="st"
elif self.edition==2:
nth="nd"
elif self.edition==3:
nth="rd"
else : nth="th"
return self.title + ", "+ str(self.edition)+nth + " Edition by " + self.author
def was_added_recently(self):
return self.date_added >= timezone.now() - datetime.timedelta(days=30)
|
from django.db import models
from datetime import datetime
from django.utils import timezone
# Create your models here.
class Book(models.Model):
title = models.CharField(max_length=200)
author = models.CharField(max_length=200)
year = models.DateTimeField('year published',
help_text="Please use the following format: <em>YYYY-MM-DD</em>.")
pages = models.IntegerField(default=0)
isbn_10 = models.IntegerField(default=0)
isbn_13 = models.IntegerField(default=0)
description = models.TextField()
cover_image = models.ImageField('cover Image',
upload_to='cover_pics/%Y-%m-%d/',
null=True,
blank=True)
date_added = models.DateTimeField(default=datetime.now)
def __str__(self):
return self.title + " by " + self.author
def was_added_recently(self):
return self.date_added >= timezone.now() - datetime.timedelta(days=30)
|
Remove uneccesary console.log and storage.
|
var app = require('./server/server-config.js');
var models = require('./server/db/orm-model.js');
var models = models();
var User = models.User;
var bodyParser = require('body-parser');
// parse application/x-www-form-urlencoded
app.use(bodyParser.urlencoded({ extended: false }));
// parse application/json
app.use(bodyParser.json());
app.set('port', process.env.PORT || 4568);
app.listen(app.get('port'));
//will replace this with db
var storage = {};
app.post('/', function(req, res) {
//User.findOrCreate({where: {userId: req.body.user_id.slice(7)})
User.upsert({
userId: req.body.user_id.slice(7),
email: req.body.email,
picture: req.body.picture,
name: req.body.name,
nickname:req.body.nickname});
res.sendStatus(200);
});
app.get('/', function(req, res){
res.sendStatus(200);
});
console.log('Server listening on port ', app.get('port'));
|
var app = require('./server/server-config.js');
var models = require('./server/db/orm-model.js');
var models = models();
var User = models.User;
var bodyParser = require('body-parser');
// parse application/x-www-form-urlencoded
app.use(bodyParser.urlencoded({ extended: false }));
// parse application/json
app.use(bodyParser.json());
app.set('port', process.env.PORT || 4568);
app.listen(app.get('port'));
//will replace this with db
var storage = {};
app.post('/', function(req, res) {
storage[req.body.user_id] = req.body;
console.log(storage);
//User.findOrCreate({where: {userId: req.body.user_id.slice(7)})
User.upsert({
userId: req.body.user_id.slice(7),
email: req.body.email,
picture: req.body.picture,
name: req.body.name,
nickname:req.body.nickname});
res.sendStatus(200);
});
app.get('/', function(req, res){
console.log(storage);
res.sendStatus(200);
});
console.log('Server listening on port ', app.get('port'));
|
Fix functional shell tests in travis
Hostname in travis contains random numbers meaning that it occasionally may
contain a PID as well. Thus it must be replaced first.
|
#!/usr/bin/env python
from __future__ import unicode_literals
import os
import socket
import sys
import codecs
fname = sys.argv[1]
new_fname = fname + '.new'
pid_fname = 'tests/shell/3rd/pid'
with open(pid_fname, 'r') as P:
pid = P.read().strip()
hostname = socket.gethostname()
user = os.environ['USER']
with codecs.open(fname, 'r', encoding='utf-8') as R:
with codecs.open(new_fname, 'w', encoding='utf-8') as W:
found_cd = False
for line in R:
if not found_cd:
found_cd = ('cd tests/shell/3rd' in line)
continue
if 'true is the last line' in line:
break
line = line.translate({
ord('\r'): None
})
line = line.replace(hostname, 'HOSTNAME')
line = line.replace(user, 'USER')
line = line.replace(pid, 'PID')
W.write(line)
os.rename(new_fname, fname)
|
#!/usr/bin/env python
from __future__ import unicode_literals
import os
import socket
import sys
import codecs
fname = sys.argv[1]
new_fname = fname + '.new'
pid_fname = 'tests/shell/3rd/pid'
with open(pid_fname, 'r') as P:
pid = P.read().strip()
hostname = socket.gethostname()
user = os.environ['USER']
with codecs.open(fname, 'r', encoding='utf-8') as R:
with codecs.open(new_fname, 'w', encoding='utf-8') as W:
found_cd = False
for line in R:
if not found_cd:
found_cd = ('cd tests/shell/3rd' in line)
continue
if 'true is the last line' in line:
break
line = line.translate({
ord('\r'): None
})
line = line.replace(pid, 'PID')
line = line.replace(hostname, 'HOSTNAME')
line = line.replace(user, 'USER')
W.write(line)
os.rename(new_fname, fname)
|
Remove unnecessary variable in importantRule
|
'use strict';
var path = require('path');
module.exports = function (options) {
var filename = path.basename(options.path);
var config = options.config;
var node = options.node;
var value;
// Bail if the linter isn't wanted
if (!config.importantRule || (config.importantRule && !config.importantRule.enabled)) {
return null;
}
// Not applicable, bail
if (node.type !== 'declaration') {
return null;
}
node.forEach('value', function (element) {
value = element.first('important');
});
if (value) {
return {
column: value.start.column,
file: filename,
line: value.start.line,
linter: 'importantRule',
message: '!important should not be used.'
};
}
return null;
};
|
'use strict';
var path = require('path');
module.exports = function (options) {
var filename = path.basename(options.path);
var config = options.config;
var node = options.node;
var message;
var value;
// Bail if the linter isn't wanted
if (!config.importantRule || (config.importantRule && !config.importantRule.enabled)) {
return null;
}
// Not applicable, bail
if (node.type !== 'declaration') {
return null;
}
node.forEach('value', function (element) {
value = element.first('important');
});
if (value) {
message = '!important should not be used.';
}
if (message) {
return {
column: value.start.column,
file: filename,
line: value.start.line,
linter: 'importantRule',
message: message
};
}
return null;
};
|
Attach as_jsonapi to models for easy serialization
|
from __future__ import unicode_literals
registry = {}
bound_registry = {}
def register(cls):
registry[cls.api_type] = cls
def as_jsonapi(self):
return cls(self).serialize()
cls.model.as_jsonapi = as_jsonapi
return cls
def bind(parent=None, resource=None):
def wrapper(endpointset):
if parent is not None:
endpointset.parent = parent
endpointset.url.parent = parent.url
if resource is not None:
BoundResource = type(
str("Bound{}".format(resource.__class__.__name__)),
(resource,),
{"endpointset": endpointset},
)
endpointset.resource_class = BoundResource
# override registry with bound resource (typically what we want)
registry[resource.api_type] = BoundResource
endpointset.relationships = getattr(endpointset, "relationships", {})
return endpointset
return wrapper
|
from __future__ import unicode_literals
registry = {}
bound_registry = {}
def register(cls):
registry[cls.api_type] = cls
return cls
def bind(parent=None, resource=None):
def wrapper(endpointset):
if parent is not None:
endpointset.parent = parent
endpointset.url.parent = parent.url
if resource is not None:
BoundResource = type(
str("Bound{}".format(resource.__class__.__name__)),
(resource,),
{"endpointset": endpointset},
)
endpointset.resource_class = BoundResource
# override registry with bound resource (typically what we want)
registry[resource.api_type] = BoundResource
endpointset.relationships = getattr(endpointset, "relationships", {})
return endpointset
return wrapper
|
Switch back to using query instead of body
|
var express = require('express');
var Global = require('../global');
var search = require('../commands/search');
var router = express.Router();
router.get('/*', function(req, res) {
var response = '';
var arg = req.query.text;
console.log(req.query); //TODO delete
console.log(req.query.token); //TODO delete
//validate token
if(req.query.token === Global.authToken) {
switch(req.query.command) {
case '/searchin':
response = search(arg, res);
break;
default:
res.status('500');
res.json({
text: 'Unknown command'
});
}
} else {
res.status('403');
res.json({
text: 'Invalid token'
});
}
});
router.post('/*', function(req, res) {
res.status('405');
res.json({
text: 'Method not allowed'
});
});
module.exports = router;
|
var express = require('express');
var Global = require('../global');
var search = require('../commands/search');
var router = express.Router();
router.get('/*', function(req, res) {
var response = '';
var arg = req.body.text;
console.log(req.body); //TODO delete
console.log(req.body.token); //TODO delete
//validate token
if(req.body.token === Global.authToken) {
switch(req.body.command) {
case '/searchin':
response = search(arg, res);
break;
default:
res.status('500');
res.json({
text: 'Unknown command'
});
}
} else {
res.status('403');
res.json({
text: 'Invalid token'
});
}
});
router.post('/*', function(req, res) {
res.status('405');
res.json({
text: 'Method not allowed'
});
});
module.exports = router;
|
Change from beta -> stable for v1.0.x
|
import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name="energenie",
version="1.0.1",
author="Ben Nuttall",
author_email="ben@raspberrypi.org",
description="Remotely control power sockets from the Raspberry Pi",
license="BSD",
keywords=[
"energenie",
"raspberrypi",
],
url="https://github.com/bennuttall/energenie",
packages=[
"energenie",
],
install_requires=[
"RPi.GPIO",
],
long_description=read('README.rst'),
classifiers=[
"Development Status :: 5 - Production/Stable",
"Topic :: Home Automation",
"License :: OSI Approved :: BSD License",
],
)
|
import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name="energenie",
version="1.0.1",
author="Ben Nuttall",
author_email="ben@raspberrypi.org",
description="Remotely control power sockets from the Raspberry Pi",
license="BSD",
keywords=[
"energenie",
"raspberrypi",
],
url="https://github.com/bennuttall/energenie",
packages=[
"energenie",
],
install_requires=[
"RPi.GPIO",
],
long_description=read('README.rst'),
classifiers=[
"Development Status :: 4 - Beta",
"Topic :: Home Automation",
"License :: OSI Approved :: BSD License",
],
)
|
Make sure QuerySet.delete() operation does not bypass protection
This fixes #1
|
from django.db import models
from logicaldelete.query import LogicalDeleteQuerySet
class LogicalDeletedManager(models.Manager):
"""
A manager that serves as the default manager for `logicaldelete.models.Model`
providing the filtering out of logically deleted objects. In addition, it
provides named querysets for getting the deleted objects.
"""
def get_query_set(self):
if self.model:
return LogicalDeleteQuerySet(self.model, using=self._db).filter(
date_removed__isnull=True
)
def all_with_deleted(self):
if self.model:
return super(LogicalDeletedManager, self).get_query_set()
def only_deleted(self):
if self.model:
return super(LogicalDeletedManager, self).get_query_set().filter(
date_removed__isnull=False
)
def get(self, *args, **kwargs):
return self.all_with_deleted().get(*args, **kwargs)
def filter(self, *args, **kwargs):
if "pk" in kwargs:
return self.all_with_deleted().filter(*args, **kwargs)
return self.get_query_set().filter(*args, **kwargs)
|
from django.db import models
class LogicalDeletedManager(models.Manager):
"""
A manager that serves as the default manager for `logicaldelete.models.Model`
providing the filtering out of logically deleted objects. In addition, it
provides named querysets for getting the deleted objects.
"""
def get_query_set(self):
if self.model:
return super(LogicalDeletedManager, self).get_query_set().filter(
date_removed__isnull=True
)
def all_with_deleted(self):
if self.model:
return super(LogicalDeletedManager, self).get_query_set()
def only_deleted(self):
if self.model:
return super(LogicalDeletedManager, self).get_query_set().filter(
date_removed__isnull=False
)
def get(self, *args, **kwargs):
return self.all_with_deleted().get(*args, **kwargs)
def filter(self, *args, **kwargs):
if "pk" in kwargs:
return self.all_with_deleted().filter(*args, **kwargs)
return self.get_query_set().filter(*args, **kwargs)
|
Add missing cascade deletes on user/roles
|
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.security import UserMixin, RoleMixin
db = SQLAlchemy()
### Add models here
roles_users = db.Table('roles_users',
db.Column('user_id', db.Integer(), db.ForeignKey('user.id', ondelete='CASCADE')),
db.Column('role_id', db.Integer(), db.ForeignKey('role.id', ondelete='CASCADE')))
class Role(db.Model, RoleMixin):
id = db.Column(db.Integer(), primary_key=True)
name = db.Column(db.String(80), unique=True)
description = db.Column(db.String(255))
class User(db.Model, UserMixin):
id = db.Column(db.Integer, primary_key=True)
email = db.Column(db.String(255), unique=True)
password = db.Column(db.String(255))
active = db.Column(db.Boolean())
confirmed_at = db.Column(db.DateTime())
roles = db.relationship('Role', secondary=roles_users,
backref=db.backref('users', lazy='dynamic'))
|
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.security import UserMixin, RoleMixin
db = SQLAlchemy()
### Add models here
roles_users = db.Table('roles_users',
db.Column('user_id', db.Integer(), db.ForeignKey('user.id')),
db.Column('role_id', db.Integer(), db.ForeignKey('role.id')))
class Role(db.Model, RoleMixin):
id = db.Column(db.Integer(), primary_key=True)
name = db.Column(db.String(80), unique=True)
description = db.Column(db.String(255))
class User(db.Model, UserMixin):
id = db.Column(db.Integer, primary_key=True)
email = db.Column(db.String(255), unique=True)
password = db.Column(db.String(255))
active = db.Column(db.Boolean())
confirmed_at = db.Column(db.DateTime())
roles = db.relationship('Role', secondary=roles_users,
backref=db.backref('users', lazy='dynamic'))
|
Add tmp dummy mail report so that the diagnoser kinda works instead of failing miserably
|
#!/usr/bin/env python
import os
from yunohost.diagnosis import Diagnoser
class MailDiagnoser(Diagnoser):
id_ = os.path.splitext(os.path.basename(__file__))[0].split("-")[1]
cache_duration = 3600
dependencies = ["ip"]
def run(self):
# TODO / FIXME TO BE IMPLEMETED in the future ...
yield dict(meta={},
status="WARNING",
summary=("nothing_implemented_yet", {}))
# Mail blacklist using dig requests (c.f. ljf's code)
# Outgoing port 25 (c.f. code in monitor.py, a simple 'nc -zv yunohost.org 25' IIRC)
# SMTP reachability (c.f. check-smtp to be implemented on yunohost's remote diagnoser)
# ideally, SPF / DMARC / DKIM validation ... (c.f. https://github.com/alexAubin/yunoScripts/blob/master/yunoDKIM.py possibly though that looks horrible)
# check that the mail queue is not filled with hundreds of email pending
# check that the recent mail logs are not filled with thousand of email sending (unusual number of mail sent)
# check for unusual failed sending attempt being refused in the logs ?
def main(args, env, loggers):
return MailDiagnoser(args, env, loggers).diagnose()
|
#!/usr/bin/env python
import os
from yunohost.diagnosis import Diagnoser
class MailDiagnoser(Diagnoser):
id_ = os.path.splitext(os.path.basename(__file__))[0].split("-")[1]
cache_duration = 3600
dependencies = ["ip"]
def run(self):
return # TODO / FIXME TO BE IMPLEMETED in the future ...
# Mail blacklist using dig requests (c.f. ljf's code)
# Outgoing port 25 (c.f. code in monitor.py, a simple 'nc -zv yunohost.org 25' IIRC)
# SMTP reachability (c.f. check-smtp to be implemented on yunohost's remote diagnoser)
# ideally, SPF / DMARC / DKIM validation ... (c.f. https://github.com/alexAubin/yunoScripts/blob/master/yunoDKIM.py possibly though that looks horrible)
# check that the mail queue is not filled with hundreds of email pending
# check that the recent mail logs are not filled with thousand of email sending (unusual number of mail sent)
# check for unusual failed sending attempt being refused in the logs ?
def main(args, env, loggers):
return MailDiagnoser(args, env, loggers).diagnose()
|
Make this the first release candidate.
|
"""
Flask-Selfdoc
-------------
Flask selfdoc automatically creates an online documentation for your flask app.
"""
from setuptools import setup
def readme():
with open('README.md') as f:
return f.read()
setup(
name='Flask-Selfdoc',
version='1.0rc1',
url='http://github.com/jwg4/flask-selfdoc',
license='MIT',
author='Arnaud Coomans',
maintainer='Jack Grahl',
maintainer_email='jack.grahl@gmail.com',
description='Documentation generator for flask',
long_description=readme(),
# py_modules=['flask_autodoc'],
# if you would be using a package instead use packages instead
# of py_modules:
packages=['flask_selfdoc'],
package_data={'flask_selfdoc': ['templates/autodoc_default.html']},
zip_safe=False,
include_package_data=True,
platforms='any',
install_requires=[
'Flask'
],
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
test_suite='tests',
)
|
"""
Flask-Selfdoc
-------------
Flask selfdoc automatically creates an online documentation for your flask app.
"""
from setuptools import setup
def readme():
with open('README.md') as f:
return f.read()
setup(
name='Flask-Selfdoc',
version='1.0',
url='http://github.com/jwg4/flask-selfdoc',
license='MIT',
author='Arnaud Coomans',
maintainer='Jack Grahl',
maintainer_email='jack.grahl@gmail.com',
description='Documentation generator for flask',
long_description=readme(),
# py_modules=['flask_autodoc'],
# if you would be using a package instead use packages instead
# of py_modules:
packages=['flask_selfdoc'],
package_data={'flask_selfdoc': ['templates/autodoc_default.html']},
zip_safe=False,
include_package_data=True,
platforms='any',
install_requires=[
'Flask'
],
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
test_suite='tests',
)
|
Fix wrong parameter name in permit_groups decorator
|
from __future__ import unicode_literals
# Authentication and Authorisation
from functools import wraps
from . import http
def permit(test_func):
'''Decorate a handler to control access'''
def decorator(view_func):
@wraps(view_func)
def _wrapped_view(self, *args, **kwargs):
if test_func(self, *args, **kwargs):
return view_func(self, *args, **kwargs)
return http.Forbidden()
return _wrapped_view
return decorator
permit_logged_in = permit(
lambda self, *args, **kwargs: self.request.user.is_authenticated()
)
permit_staff = permit(
lambda self, *args, **kwargs: self.request.user.is_staff
)
def permit_groups(*groups):
def in_groups(request, *args):
return request.user.groups.filter(name__in=groups).exists()
return permit(
lambda self, *args, **kwargs: in_groups(self.request, *groups)
)
|
from __future__ import unicode_literals
# Authentication and Authorisation
from functools import wraps
from . import http
def permit(test_func):
'''Decorate a handler to control access'''
def decorator(view_func):
@wraps(view_func)
def _wrapped_view(self, *args, **kwargs):
if test_func(self, *args, **kwargs):
return view_func(self, *args, **kwargs)
return http.Forbidden()
return _wrapped_view
return decorator
permit_logged_in = permit(
lambda self, *args, **kwargs: self.request.user.is_authenticated()
)
permit_staff = permit(
lambda self, *args, **kwargs: self.request.user.is_staff
)
def permit_groups(*groups):
def in_groups(request, *args):
return request.user.groups.filter(name__in=args).exists()
return permit(
lambda self, *args, **kwargs: in_groups(self.request, *groups)
)
|
Add method comment to population script for easy deploy
|
from string import ascii_lowercase
import django
import os
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'backend.settings')
django.setup()
from breach.models import Target, Victim
endpoint = 'https://dimkarakostas.com/rupture/test.php?ref=%s'
prefix = 'imper'
alphabet = ascii_lowercase
secretlength = 9
target_1 = Target(
endpoint=endpoint,
prefix=prefix,
alphabet=alphabet,
secretlength=secretlength
)
target_1.save()
print 'Created Target:\n\tendpoint: {}\n\tprefix: {}\n\talphabet: {}\n\tsecretlength: {}'.format(endpoint, prefix, alphabet, secretlength)
snifferendpoint = 'http://127.0.0.1:9000'
sourceip = '192.168.1.70'
victim_1 = Victim(
target=target_1,
snifferendpoint=snifferendpoint,
sourceip=sourceip,
# method='serial'
)
victim_1.save()
print 'Created Victim:\n\tvictim_id: {}\n\tsnifferendpoint: {}\n\tsourceip: {}'.format(victim_1.id, snifferendpoint, sourceip)
|
from string import ascii_lowercase
import django
import os
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'backend.settings')
django.setup()
from breach.models import Target, Victim
endpoint = 'https://dimkarakostas.com/rupture/test.php?ref=%s'
prefix = 'imper'
alphabet = ascii_lowercase
secretlength = 9
target_1 = Target(
endpoint=endpoint,
prefix=prefix,
alphabet=alphabet,
secretlength=secretlength
)
target_1.save()
print 'Created Target:\n\tendpoint: {}\n\tprefix: {}\n\talphabet: {}\n\tsecretlength: {}'.format(endpoint, prefix, alphabet, secretlength)
snifferendpoint = 'http://127.0.0.1:9000'
sourceip = '192.168.1.70'
victim_1 = Victim(
target=target_1,
snifferendpoint=snifferendpoint,
sourceip=sourceip
)
victim_1.save()
print 'Created Victim:\n\tvictim_id: {}\n\tsnifferendpoint: {}\n\tsourceip: {}'.format(victim_1.id, snifferendpoint, sourceip)
|
Remove cap from map def.
|
package sessmgr
import (
"sync"
"time"
)
type Session struct {
id string
Last time.Time
mu sync.RWMutex
Val map[string]interface{}
prov Provider
}
func NewSession(id string, provider Provider) *Session {
v := make(map[string]interface{})
return &Session{
id: id,
Last: time.Now(),
Val: v, prov: provider,
}
}
func (s *Session) Set(key string, val interface{}) error {
s.mu.Lock()
defer s.mu.Unlock()
s.Val[key] = val
s.prov.Update(s.id)
return nil
}
func (s *Session) Get(key string) interface{} {
s.mu.RLock()
defer s.mu.RUnlock()
// TODO: ? deal with bool
v, _ := s.Val[key]
s.prov.Update(s.id)
return v
}
func (s *Session) Unset(key string) {
s.mu.Lock()
defer s.mu.Unlock()
delete(s.Val, key)
s.prov.Update(s.id)
}
func (s *Session) ID() string {
return s.id
}
|
package sessmgr
import (
"sync"
"time"
)
type Session struct {
id string
Last time.Time
mu sync.RWMutex
Val map[string]interface{}
prov Provider
}
func NewSession(id string, provider Provider) *Session {
v := make(map[string]interface{}, 0)
return &Session{
id: id,
Last: time.Now(),
Val: v, prov: provider,
}
}
func (s *Session) Set(key string, val interface{}) error {
s.mu.Lock()
defer s.mu.Unlock()
s.Val[key] = val
s.prov.Update(s.id)
return nil
}
func (s *Session) Get(key string) interface{} {
s.mu.RLock()
defer s.mu.RUnlock()
// TODO: ? deal with bool
v, _ := s.Val[key]
s.prov.Update(s.id)
return v
}
func (s *Session) Unset(key string) {
s.mu.Lock()
defer s.mu.Unlock()
delete(s.Val, key)
s.prov.Update(s.id)
}
func (s *Session) ID() string {
return s.id
}
|
Fix converter to escape HTML characters
|
/*-
* #%L
* SciJava polyglot kernel for Jupyter.
* %%
* Copyright (C) 2017 Hadrien Mary
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package org.scijava.notebook.converter;
import org.apache.commons.lang3.StringEscapeUtils;
import org.scijava.Priority;
import org.scijava.convert.Converter;
import org.scijava.notebook.converter.ouput.HTMLNotebookOutput;
import org.scijava.plugin.Plugin;
@Plugin(type = Converter.class, priority = Priority.LOW_PRIORITY)
public class StringToHTMLNotebookConverter
extends NotebookOutputConverter<String, HTMLNotebookOutput> {
@Override
public Class<String> getInputType() {
return String.class;
}
@Override
public Class<HTMLNotebookOutput> getOutputType() {
return HTMLNotebookOutput.class;
}
@Override
public HTMLNotebookOutput convert(Object object) {
return new HTMLNotebookOutput(HTMLNotebookOutput.getMimeType(),
StringEscapeUtils.escapeHtml4((String) object));
}
}
|
/*-
* #%L
* SciJava polyglot kernel for Jupyter.
* %%
* Copyright (C) 2017 Hadrien Mary
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package org.scijava.notebook.converter;
import org.scijava.Priority;
import org.scijava.convert.Converter;
import org.scijava.notebook.converter.ouput.HTMLNotebookOutput;
import org.scijava.plugin.Plugin;
@Plugin(type = Converter.class, priority = Priority.LOW_PRIORITY)
public class StringToHTMLNotebookConverter
extends NotebookOutputConverter<String, HTMLNotebookOutput> {
@Override
public Class<String> getInputType() {
return String.class;
}
@Override
public Class<HTMLNotebookOutput> getOutputType() {
return HTMLNotebookOutput.class;
}
@Override
public HTMLNotebookOutput convert(Object object) {
return new HTMLNotebookOutput(HTMLNotebookOutput.getMimeType(),
(String) object);
}
}
|
Enable tooltip freeze on samples.
|
'use strict';
var React = require('react');
var Sortable = require('../views/Sortable');
var {deepPureRenderMixin} = require('../react-utils');
// We skip the first column to keep 'samples' on the left.
function makeSortable(Component) {
return React.createClass({
displayName: 'SpreadsheetSortable',
mixins: [deepPureRenderMixin],
render() {
var {onClick, onReorder, children, ...otherProps} = this.props,
[first, ...rest] = React.Children.toArray(children);
return (
<Component {...otherProps}>
<div onClick={onClick}>
{first}
</div>
<Sortable onClick={onClick} onReorder={order => onReorder([first.props.actionKey, ...order])}>
{rest}
</Sortable>
</Component>);
}
});
}
module.exports = makeSortable;
|
'use strict';
var React = require('react');
var Sortable = require('../views/Sortable');
var {deepPureRenderMixin} = require('../react-utils');
// We skip the first column to keep 'samples' on the left.
function makeSortable(Component) {
return React.createClass({
displayName: 'SpreadsheetSortable',
mixins: [deepPureRenderMixin],
render() {
var {onClick, onReorder, children, ...otherProps} = this.props,
[first, ...rest] = React.Children.toArray(children);
return (
<Component {...otherProps}>
{first}
<Sortable onClick={onClick} onReorder={order => onReorder([first.props.actionKey, ...order])}>
{rest}
</Sortable>
</Component>);
}
});
}
module.exports = makeSortable;
|
Fix usage comment to refer to the correct filename
|
"""
Usage:
$ python get_test_group.py group_1
test_foo.py test_bar.py::TestClass
$
This is used by CI to run only a certain set of tests on a particular builder.
See ``test_groups.yaml`` for details.
"""
import yaml
from pathlib import Path
from typing import List
import click
def patterns_from_group(group_name: str) -> List[str]:
"""
Given a group name, return all the pytest patterns defined for that group
in ``test_groups.yaml``.
"""
test_group_file = Path(__file__).parent / 'test_groups.yaml'
test_group_file_contents = test_group_file.read_text()
test_groups = yaml.load(test_group_file_contents)['groups']
return test_groups[group_name]
@click.command('list-integration-test-patterns')
@click.argument('group_name')
def list_integration_test_patterns(group_name: str) -> None:
"""
Perform a release.
"""
test_patterns = patterns_from_group(group_name=group_name)
click.echo(' '.join(test_patterns), nl=False)
if __name__ == '__main__':
list_integration_test_patterns()
|
"""
Usage:
$ python test_group_name.py group_1
test_foo.py test_bar.py::TestClass
$
This is used by CI to run only a certain set of tests on a particular builder.
See ``test_groups.yaml`` for details.
"""
import yaml
from pathlib import Path
from typing import List
import click
def patterns_from_group(group_name: str) -> List[str]:
"""
Given a group name, return all the pytest patterns defined for that group
in ``test_groups.yaml``.
"""
test_group_file = Path(__file__).parent / 'test_groups.yaml'
test_group_file_contents = test_group_file.read_text()
test_groups = yaml.load(test_group_file_contents)['groups']
return test_groups[group_name]
@click.command('list-integration-test-patterns')
@click.argument('group_name')
def list_integration_test_patterns(group_name: str) -> None:
"""
Perform a release.
"""
test_patterns = patterns_from_group(group_name=group_name)
click.echo(' '.join(test_patterns), nl=False)
if __name__ == '__main__':
list_integration_test_patterns()
|
Fix format string for Python 2.6
|
# -*- coding: utf-8 -*-
'''Test using a proxy.'''
# External imports
import multiprocessing
import pytest
from six.moves import socketserver, SimpleHTTPServer
from six.moves.urllib.request import urlopen
# Internal imports
import vcr
# Conditional imports
requests = pytest.importorskip("requests")
class Proxy(SimpleHTTPServer.SimpleHTTPRequestHandler):
'''
Simple proxy server.
(from: http://effbot.org/librarybook/simplehttpserver.htm).
'''
def do_GET(self):
self.copyfile(urlopen(self.path), self.wfile)
@pytest.yield_fixture(scope='session')
def proxy_server():
httpd = socketserver.ThreadingTCPServer(('', 0), Proxy)
proxy_process = multiprocessing.Process(
target=httpd.serve_forever,
)
proxy_process.start()
yield 'http://{0}:{1}'.format(*httpd.server_address)
proxy_process.terminate()
def test_use_proxy(tmpdir, httpbin, proxy_server):
'''Ensure that it works with a proxy.'''
with vcr.use_cassette(str(tmpdir.join('proxy.yaml'))):
requests.get(httpbin.url, proxies={'http': proxy_server})
requests.get(httpbin.url, proxies={'http': proxy_server})
|
# -*- coding: utf-8 -*-
'''Test using a proxy.'''
# External imports
import multiprocessing
import pytest
requests = pytest.importorskip("requests")
from six.moves import socketserver, SimpleHTTPServer
from six.moves.urllib.request import urlopen
# Internal imports
import vcr
class Proxy(SimpleHTTPServer.SimpleHTTPRequestHandler):
'''
Simple proxy server.
(from: http://effbot.org/librarybook/simplehttpserver.htm).
'''
def do_GET(self):
self.copyfile(urlopen(self.path), self.wfile)
@pytest.yield_fixture(scope='session')
def proxy_server(httpbin):
httpd = socketserver.ForkingTCPServer(('', 0), Proxy)
proxy_process = multiprocessing.Process(
target=httpd.serve_forever,
)
proxy_process.start()
yield 'http://{}:{}'.format(*httpd.server_address)
proxy_process.terminate()
def test_use_proxy(tmpdir, httpbin, proxy_server):
'''Ensure that it works with a proxy.'''
with vcr.use_cassette(str(tmpdir.join('proxy.yaml'))):
requests.get(httpbin.url, proxies={'http': proxy_server})
requests.get(httpbin.url, proxies={'http': proxy_server})
|
Add dependency and correct name
|
#!/usr/bin/env python3
import os
from setuptools import setup, find_packages
from vtimshow import defaults
def read(fname):
"""
Utility function to read a file.
"""
return open(fname, "r").read().strip()
setup(
name = "vtimshow",
version = read(os.path.join(os.path.dirname(__file__), "VERSION")),
packages = find_packages(),
description = defaults.COMMENT,
long_description = read(
os.path.join(os.path.dirname(__file__), "README.txt")
),
author = defaults.AUTHOR,
author_email = defaults.AUTHOR_EMAIL,
license = defaults.LICENSE,
install_requires = ["ViTables >2.1", "pyqtgraph"],
dependency_links = [
"https://github.com/uvemas/ViTables@f6cb68227e10bf0658fd11b8daa56b76452b0341#egg=project-version"
],
entry_points = {
"vitables.plugins" :
defaults.UID +" = vtimshow:VtImageViewer"
}
)
|
#!/usr/bin/env python3
import os
from setuptools import setup, find_packages
from vtimshow import defaults
def read(fname):
"""
Utility function to read a file.
"""
return open(fname, "r").read().strip()
setup(
name = "vtimshow",
version = read(os.path.join(os.path.dirname(__file__), "VERSION")),
packages = find_packages(),
description = defaults.COMMENT,
long_description = read(
os.path.join(os.path.dirname(__file__), "README.txt")
),
author = defaults.AUTHOR,
author_email = defaults.AUTHOR_EMAIL,
license = defaults.LICENSE,
install_requires = ["ViTables >2.1",],
dependency_links = [
"https://github.com/uvemas/ViTables@f6cb68227e10bf0658fd11b8daa56b76452b0341#egg=project-version"
],
entry_points = {
"vitables.plugins" :
"image_viewer = vtimshow:VtImageViewer"
}
)
|
Update expected endpoint object structure
|
/* global describe, context, it */
'use strict';
const path = require('path');
const chai = require('chai');
const endpoints = require(path.normalize(`${__dirname}/../lib/endpoints`));
describe('lib/endpoints.js', function () {
it('should have a method "getEndpoint"', function (done) {
chai.assert.isFunction(endpoints.getEndpoint);
done();
});
context('.getEndpoint()', function () {
const endpoint = endpoints.getEndpoint();
it('should return the US endpoint', function (done) {
chai.assert.deepEqual(endpoint, {
hostname: 'https://us.api.battle.net',
locale: 'en_US'
});
done();
});
});
context('.getEndpoint("sea")', function () {
const endpoint = endpoints.getEndpoint('sea');
it('should return the SEA endpoint', function (done) {
chai.assert.deepEqual(endpoint, {
hostname: 'https://sea.api.battle.net',
locale: 'en_US'
});
done();
});
});
context('.getEndpoint("eu", "es_ES")', function () {
const endpoint = endpoints.getEndpoint('eu', 'es_ES');
it('should return the EU endpoint', function (done) {
chai.assert.deepEqual(endpoint, {
hostname: 'https://eu.api.battle.net',
locale: 'es_ES'
});
done();
});
});
});
|
/* global describe, context, it */
'use strict';
const path = require('path');
const chai = require('chai');
const endpoints = require(path.normalize(`${__dirname}/../lib/endpoints`));
describe('lib/endpoints.js', function () {
it('should have a method `getEndpoint`', function (done) {
chai.assert.isFunction(endpoints.getEndpoint);
done();
});
context('.getEndpoint()', function () {
const endpoint = endpoints.getEndpoint();
it('should return the default endpoint', function (done) {
chai.assert.deepEqual(endpoint, {
hostname: 'https://us.api.battle.net',
defaultLocale: 'en_US',
locales: ['en_US', 'es_MX', 'pt_BR']
});
done();
});
});
context('.getEndpoint(`eu`)', function () {
const endpoint = endpoints.getEndpoint('eu');
it('should return the requested endpoint', function (done) {
chai.assert.deepEqual(endpoint, {
hostname: 'https://eu.api.battle.net',
defaultLocale: 'en_GB',
locales: ['en_GB', 'es_ES', 'fr_FR', 'ru_RU', 'de_DE', 'pt_PT', 'it_IT']
});
done();
});
});
});
|
Fix import of Callable for Python 3.9
Python 3.3 moved Callable to collections.abc and Python 3.9 removes Callable from collections module
|
from __future__ import unicode_literals
import re
try:
from collections.abc import Callable
except ImportError:
from collections import Callable
def user_is_authenticated(user):
if isinstance(user.is_authenticated, Callable):
authenticated = user.is_authenticated()
else:
authenticated = user.is_authenticated
return authenticated
def camelToSnake(s):
"""
https://gist.github.com/jaytaylor/3660565
Is it ironic that this function is written in camel case, yet it
converts to snake case? hmm..
"""
_underscorer1 = re.compile(r'(.)([A-Z][a-z]+)')
_underscorer2 = re.compile('([a-z0-9])([A-Z])')
subbed = _underscorer1.sub(r'\1_\2', s)
return _underscorer2.sub(r'\1_\2', subbed).lower()
def snake_to_title(s):
return ' '.join(x.capitalize() for x in s.split('_'))
def camel_or_snake_to_title(s):
return snake_to_title(camelToSnake(s))
|
from __future__ import unicode_literals
import re
import collections
def user_is_authenticated(user):
if isinstance(user.is_authenticated, collections.Callable):
authenticated = user.is_authenticated()
else:
authenticated = user.is_authenticated
return authenticated
def camelToSnake(s):
"""
https://gist.github.com/jaytaylor/3660565
Is it ironic that this function is written in camel case, yet it
converts to snake case? hmm..
"""
_underscorer1 = re.compile(r'(.)([A-Z][a-z]+)')
_underscorer2 = re.compile('([a-z0-9])([A-Z])')
subbed = _underscorer1.sub(r'\1_\2', s)
return _underscorer2.sub(r'\1_\2', subbed).lower()
def snake_to_title(s):
return ' '.join(x.capitalize() for x in s.split('_'))
def camel_or_snake_to_title(s):
return snake_to_title(camelToSnake(s))
|
Fix typo (filesytem --> filesystem).
RELNOTES: None.
PiperOrigin-RevId: 216337432
|
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.actions;
import java.io.IOException;
/**
* Used to indicate a filesystem inconsistency, e.g. file 'a/b' exists but directory 'a' doesn't
* exist. This generally means the result of the build is undefined but we shouldn't crash hard.
*/
public class InconsistentFilesystemException extends IOException {
public InconsistentFilesystemException(String inconsistencyMessage) {
super(
"Inconsistent filesystem operations. "
+ inconsistencyMessage
+ " The results of the "
+ "build are not guaranteed to be correct. You should probably run 'bazel clean' and "
+ "investigate the filesystem inconsistency (likely due to filesystem updates "
+ "concurrent with the build)");
}
}
|
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.actions;
import java.io.IOException;
/**
* Used to indicate a filesystem inconsistency, e.g. file 'a/b' exists but directory 'a' doesn't
* exist. This generally means the result of the build is undefined but we shouldn't crash hard.
*/
public class InconsistentFilesystemException extends IOException {
public InconsistentFilesystemException(String inconsistencyMessage) {
super(
"Inconsistent filesystem operations. "
+ inconsistencyMessage
+ " The results of the "
+ "build are not guaranteed to be correct. You should probably run 'bazel clean' and "
+ "investigate the filesystem inconsistency (likely due to filesytem updates "
+ "concurrent with the build)");
}
}
|
Check if scholarship is open before rendering cta block
|
{{-- Block Type: Call To Action --}}
<section {{ output_id($block->block_title) }} class="segment segment--cta">
@if (!empty($block->block_title))
<h1 class="heading -alpha">{{ $block->block_title }}</h1>
@endif
<div class="wrapper">
@if (!empty($block->block_body_html))
{{ $block->block_body_html }}
@endif
@if($url === 'home' && !Scholarship::isClosed() && Scholarship::isOpen())
<div class="fragment">
@if (Auth::guest())
{{ link_to_route('registration.create', 'Start Application', null, ['class' => 'button -default']) }}
{{ link_to_route('status', 'or continue an application') }}
@else
{{ link_to_route('status', 'Continue Application', null, ['class' => 'button -default']) }}
@endif
</div>
@endif
</div>
</section>
|
{{-- Block Type: Call To Action --}}
<section {{ output_id($block->block_title) }} class="segment segment--cta">
@if (!empty($block->block_title))
<h1 class="heading -alpha">{{ $block->block_title }}</h1>
@endif
<div class="wrapper">
@if (!empty($block->block_body_html))
{{ $block->block_body_html }}
@endif
@if($url === 'home' && !Scholarship::isClosed())
<div class="fragment">
@if (Auth::guest())
{{ link_to_route('registration.create', 'Start Application', null, ['class' => 'button -default']) }}
{{ link_to_route('status', 'or continue an application') }}
@else
{{ link_to_route('status', 'Continue Application', null, ['class' => 'button -default']) }}
@endif
</div>
@endif
</div>
</section>
|
Comment out extra logging on errorHandler
|
import { alerts } from '../stores/alerts'
// Handle axios errors from backend API so that they can be shown
export function errorHandler (error) {
if (error.response) {
const response = error.response
// The request was made and the server responded with a status code
// that falls out of the range of 2xx
// console.error(response.data)
// console.log(response.status)
// console.log(response.headers)
alerts.error(response.data.errors.join('\n'))
throw new Error(error.response.data.errors)
} else if (error.request) {
// The request was made but no response was received
// `error.request` is an instance of XMLHttpRequest in the browser and an instance of
// http.ClientRequest in node.js
console.log(error.request)
} else {
// Something happened in setting up the request that triggered an Error
console.log('Error', error.message)
}
// console.log(error.config)
throw error
}
|
import { alerts } from '../stores/alerts'
// Handle axios errors from backend API so that they can be shown
export function errorHandler (error) {
if (error.response) {
const response = error.response
// The request was made and the server responded with a status code
// that falls out of the range of 2xx
console.error(response.data)
console.log(response.status)
console.log(response.headers)
alerts.error(response.data.errors.join('\n'))
throw new Error(error.response.data.errors)
} else if (error.request) {
// The request was made but no response was received
// `error.request` is an instance of XMLHttpRequest in the browser and an instance of
// http.ClientRequest in node.js
console.log(error.request)
} else {
// Something happened in setting up the request that triggered an Error
console.log('Error', error.message)
}
console.log(error.config)
throw error
}
|
Add image column to character table
|
<?php
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Database\Migrations\Migration;
class CreateCharactersTable extends Migration
{
/**
* Run the migrations.
*
* @return void
*/
public function up()
{
Schema::create('characters', function (Blueprint $table) {
$table->increments('id');
$table->integer('vn_id')->unsigned()->nullable();;
// $table->foreign('vn_id')->references('vn')->on('id')->onDelete('cascade');
$table->string('kanji')->nullable()->default('');
$table->string('betsumyou')->nullable()->default('');
$table->string('yobikata')->nullable()->default('');
$table->smallInteger('birthmonth')->nullable();
$table->smallInteger('birthday')->nullable();
$table->string('image')->nullable();
$table->timestamps();
});
}
/**
* Reverse the migrations.
*
* @return void
*/
public function down()
{
Schema::drop('characters');
}
}
|
<?php
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Database\Migrations\Migration;
class CreateCharactersTable extends Migration
{
/**
* Run the migrations.
*
* @return void
*/
public function up()
{
Schema::create('characters', function (Blueprint $table) {
$table->increments('id');
$table->integer('vn_id')->unsigned()->nullable();;
// $table->foreign('vn_id')->references('vn')->on('id')->onDelete('cascade');
$table->string('kanji')->nullable()->default('');
$table->string('betsumyou')->nullable()->default('');
$table->string('yobikata')->nullable()->default('');
$table->smallInteger('birthmonth')->nullable();
$table->smallInteger('birthday')->nullable();
$table->timestamps();
});
}
/**
* Reverse the migrations.
*
* @return void
*/
public function down()
{
Schema::drop('characters');
}
}
|
Fix syntax of dependencies to be compatible with debuild
Also drop argparse as an explicit dependency, it's
part of python3
|
from distutils.core import setup
from setuptools import find_packages
setup(
name='mediawiki-utilities',
version="0.4.15",
author='Aaron Halfaker',
author_email='aaron.halfaker@gmail.com',
packages=find_packages(),
scripts=[],
url='http://pypi.python.org/pypi/mediawiki-utilities',
license=open('LICENSE').read(),
description='A set of utilities for extracting and processing MediaWiki data.',
long_description=open('README.rst').read(),
install_requires=[
"requests>=2.4",
"pymysql>=0.6.2"],
test_suite='nose.collector',
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3 :: Only",
"Environment :: Other Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Text Processing :: Linguistic",
"Topic :: Text Processing :: General",
"Topic :: Utilities",
"Topic :: Scientific/Engineering"
],
)
|
from distutils.core import setup
from setuptools import find_packages
setup(
name='mediawiki-utilities',
version="0.4.15",
author='Aaron Halfaker',
author_email='aaron.halfaker@gmail.com',
packages=find_packages(),
scripts=[],
url='http://pypi.python.org/pypi/mediawiki-utilities',
license=open('LICENSE').read(),
description='A set of utilities for extracting and processing MediaWiki data.',
long_description=open('README.rst').read(),
install_requires=[
"argparse >= 1.1",
"requests >= 2.0.1",
"pymysql >= 0.6.2"],
test_suite='nose.collector',
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3 :: Only",
"Environment :: Other Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Text Processing :: Linguistic",
"Topic :: Text Processing :: General",
"Topic :: Utilities",
"Topic :: Scientific/Engineering"
],
)
|
Make sure streams always have a source.
|
import factory
import faker
import random
import uuid
from astral.models.stream import Stream
from astral.models.node import Node
from astral.models.ticket import Ticket
ELIXIR_CREATION = lambda class_to_create, **kwargs: class_to_create(**kwargs)
factory.Factory.set_creation_function(ELIXIR_CREATION)
class StreamFactory(factory.Factory):
id = factory.Sequence(lambda n: int(n) + 1)
name = factory.LazyAttribute(lambda a: ' '.join(faker.lorem.words()))
source = factory.LazyAttribute(lambda a: NodeFactory())
class NodeFactory(factory.Factory):
ip_address = factory.LazyAttribute(lambda a: faker.internet.ip_address())
uuid = factory.LazyAttribute(lambda a: random.randrange(1000, 1000000))
port = factory.LazyAttribute(lambda a: random.randrange(1000, 10000))
class ThisNodeFactory(factory.Factory):
FACTORY_FOR = Node
ip_address = '127.0.0.1'
uuid = factory.LazyAttribute(lambda a: uuid.getnode())
port = factory.LazyAttribute(lambda a: random.randrange(1000, 10000))
class TicketFactory(factory.Factory):
source = factory.LazyAttribute(lambda a: NodeFactory())
destination = factory.LazyAttribute(lambda a: NodeFactory())
stream = factory.LazyAttribute(lambda a: StreamFactory())
|
import factory
import faker
import random
import uuid
from astral.models.stream import Stream
from astral.models.node import Node
from astral.models.ticket import Ticket
ELIXIR_CREATION = lambda class_to_create, **kwargs: class_to_create(**kwargs)
factory.Factory.set_creation_function(ELIXIR_CREATION)
class StreamFactory(factory.Factory):
id = factory.Sequence(lambda n: int(n) + 1)
name = factory.LazyAttribute(lambda a: ' '.join(faker.lorem.words()))
class NodeFactory(factory.Factory):
ip_address = factory.LazyAttribute(lambda a: faker.internet.ip_address())
uuid = factory.LazyAttribute(lambda a: random.randrange(1000, 1000000))
port = factory.LazyAttribute(lambda a: random.randrange(1000, 10000))
class ThisNodeFactory(factory.Factory):
FACTORY_FOR = Node
ip_address = '127.0.0.1'
uuid = factory.LazyAttribute(lambda a: uuid.getnode())
port = factory.LazyAttribute(lambda a: random.randrange(1000, 10000))
class TicketFactory(factory.Factory):
source = factory.LazyAttribute(lambda a: NodeFactory())
destination = factory.LazyAttribute(lambda a: NodeFactory())
stream = factory.LazyAttribute(lambda a: StreamFactory())
|
Fix a postAccessRequired check for the case of nonexisting post
|
import { ForbiddenException, NotFoundException, ServerErrorException } from '../../support/exceptions';
import { dbAdapter } from '../../models';
export function postAccessRequired(map = { postId: 'post' }) {
return async (ctx, next) => {
const forbidden = (reason = 'You can not see this post') => new ForbiddenException(reason);
const notFound = (reason = 'Post not found') => new NotFoundException(reason);
const { user: viewer } = ctx.state;
await Promise.all(Object.keys(map).map(async (key) => {
if (!ctx.params[key]) {
throw new ServerErrorException(`Server misconfiguration: the required parameter '${key}' is missing`);
}
const { [key]: postId } = ctx.params;
const post = await dbAdapter.getPostById(postId);
const author = post ? await dbAdapter.getUserById(post.userId) : null;
if (!post || !author.isActive) {
throw notFound();
}
const isVisible = await post.isVisibleFor(viewer);
if (!isVisible) {
if (!viewer && post.isProtected === '1' && post.isPrivate === '0') {
throw forbidden('Please sign in to view this post');
}
throw forbidden();
}
ctx.state[map[key]] = post;
}));
await next();
};
}
|
import { ForbiddenException, NotFoundException, ServerErrorException } from '../../support/exceptions';
import { dbAdapter } from '../../models';
export function postAccessRequired(map = { postId: 'post' }) {
return async (ctx, next) => {
const forbidden = (reason = 'You can not see this post') => new ForbiddenException(reason);
const notFound = (reason = 'Post not found') => new NotFoundException(reason);
const { user: viewer } = ctx.state;
await Promise.all(Object.keys(map).map(async (key) => {
if (!ctx.params[key]) {
throw new ServerErrorException(`Server misconfiguration: the required parameter '${key}' is missing`);
}
const { [key]: postId } = ctx.params;
const post = await dbAdapter.getPostById(postId);
const author = await dbAdapter.getUserById(post.userId);
if (!post || !author.isActive) {
throw notFound();
}
const isVisible = await post.isVisibleFor(viewer);
if (!isVisible) {
if (!viewer && post.isProtected === '1' && post.isPrivate === '0') {
throw forbidden('Please sign in to view this post');
}
throw forbidden();
}
ctx.state[map[key]] = post;
}));
await next();
};
}
|
Fix `get_queue` call in `enqueue`
|
"""
byceps.util.jobqueue
~~~~~~~~~~~~~~~~~~~~
An asynchronously processed job queue based on Redis_ and RQ_.
.. _Redis: http://redis.io/
.. _RQ: http://python-rq.org/
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from contextlib import contextmanager
from flask import current_app
from rq import Connection, Queue
from byceps.redis import redis
@contextmanager
def connection():
with Connection(redis.client):
yield
def get_queue(app):
is_async = app.config['JOBS_ASYNC']
return Queue(is_async=is_async)
def enqueue(*args, **kwargs):
"""Add the function call to the queue as a job."""
with connection():
queue = get_queue(current_app)
queue.enqueue(*args, **kwargs)
|
"""
byceps.util.jobqueue
~~~~~~~~~~~~~~~~~~~~
An asynchronously processed job queue based on Redis_ and RQ_.
.. _Redis: http://redis.io/
.. _RQ: http://python-rq.org/
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from contextlib import contextmanager
from rq import Connection, Queue
from byceps.redis import redis
@contextmanager
def connection():
with Connection(redis.client):
yield
def get_queue(app):
is_async = app.config['JOBS_ASYNC']
return Queue(is_async=is_async)
def enqueue(*args, **kwargs):
"""Add the function call to the queue as a job."""
with connection():
queue = get_queue()
queue.enqueue(*args, **kwargs)
|
Remove unused use statement from test.
|
<?php
namespace Tests\MoFlashCards\ResourceBundle\Service;
use Symfony\Bundle\FrameworkBundle\Test\KernelTestCase;
class TextToSpeechServiceTest extends KernelTestCase
{
public function testGetAudio()
{
static::bootKernel();
$service = static::$kernel->getContainer()->get('resource.text.to.speech');
$audio = $service->get('hello')->getAudio();
// check audio mime type
$fileInfo = finfo_open();
$mimeType = finfo_buffer($fileInfo, $audio, FILEINFO_MIME_TYPE);
finfo_close($fileInfo);
$this->assertEquals('audio/mpeg', $mimeType, 'Mimetype is audio.');
}
}
|
<?php
namespace Tests\MoFlashCards\ResourceBundle\Service;
use MoFlashCards\ResourceBundle\Service\TextToSpeechService;
use Symfony\Bundle\FrameworkBundle\Test\KernelTestCase;
class TextToSpeechServiceTest extends KernelTestCase
{
public function testGetAudio()
{
static::bootKernel();
$service = static::$kernel->getContainer()->get('resource.text.to.speech');
$audio = $service->get('hello')->getAudio();
// check audio mime type
$fileInfo = finfo_open();
$mimeType = finfo_buffer($fileInfo, $audio, FILEINFO_MIME_TYPE);
finfo_close($fileInfo);
$this->assertEquals('audio/mpeg', $mimeType, 'Mimetype is audio.');
}
}
|
Enable backported ``int`` in future.builtins
|
"""
A module that brings in equivalents of the new and modified Python 3
builtins into Py2. Has no effect on Py3.
See the docs for these modules for more information::
- future.builtins.iterators
- future.builtins.backports
- future.builtins.misc
- future.builtins.disabled
"""
from future.builtins.iterators import (filter, map, zip)
from future.builtins.misc import (ascii, chr, hex, input, oct, open)
from future.builtins.backports import (bytes, int, range, round, str, super)
from future import utils
if not utils.PY3:
# We only import names that shadow the builtins on Py2. No other namespace
# pollution on Py2.
# Only shadow builtins on Py2; no new names
__all__ = ['filter', 'map', 'zip',
'ascii', 'chr', 'hex', 'input', 'oct', 'open',
'bytes', 'int', 'range', 'round', 'str', 'super',
]
else:
# No namespace pollution on Py3
__all__ = []
# TODO: add 'callable' for Py3.0 and Py3.1?
|
"""
A module that brings in equivalents of the new and modified Python 3
builtins into Py2. Has no effect on Py3.
See the docs for these modules for more information::
- future.builtins.iterators
- future.builtins.backports
- future.builtins.misc
- future.builtins.disabled
"""
from future.builtins.iterators import (filter, map, zip)
from future.builtins.misc import (ascii, chr, hex, input, int, oct, open)
from future.builtins.backports import (bytes, range, round, str, super)
from future import utils
if not utils.PY3:
# We only import names that shadow the builtins on Py2. No other namespace
# pollution on Py2.
# Only shadow builtins on Py2; no new names
__all__ = ['filter', 'map', 'zip',
'ascii', 'chr', 'hex', 'input', 'oct', 'open',
'bytes', 'int', 'range', 'round', 'str', 'super',
]
else:
# No namespace pollution on Py3
__all__ = []
# TODO: add 'callable' for Py3.0 and Py3.1?
|
Remove koa.js dependencies and add any required express middleware
|
var express = require('express'),
path = require('path'),
cors = require('cors'),
dotenv = require('dotenv'),
async = require('async'),
request = require('request'),
config = require('./config'),
app = express();
// Including CORS for cross-origin request access
app.use(cors());
// Setting application port
app.set('port', config.server.port);
// Load local environment variables
dotenv.load();
// Include Capsul API Routes
require('./routes')(app);
// Listen on local/heroku server port
app.listen(config.server.port, function() {
var status =
"Express server listening on port " + app.get('port') +
" in " + process.env.NODE_ENV + " environment " + "...";
console.log(status);
});
|
var compress = require('koa-compress'),
logger = require('koa-logger'),
serve = require('koa-static'),
route = require('koa-route'),
cors = require('koa-cors'),
koa = require('koa'),
dotenv = require('dotenv'),
path = require('path'),
util = require('util'),
async = require('async'),
request = require('request'),
config = require('./config'),
app = koa();
// Including CORS for cross-origin request access
app.use(cors());
// Load local environment variables
dotenv.load();
// Include Capsul Controllers
var controllers = require('./controllers');
// Include Capsul API Routes
require('./routes')(app, route, controllers);
// Listen on local/heroku server port
app.listen(config.server.port, function() {
console.log("Koa server listening on port " +
config.server.port +
"...");
});
|
Remove the "Trying service" log.
|
var Q = require('q');
var util = require('util');
module.exports.ServiceDispatcher = function() {
this.services = [];
};
module.exports.ServiceDispatcher.prototype.use = function(service) {
this.services.push(service);
};
module.exports.ServiceDispatcher.prototype.forAll = function(command) {
var promises = this.services.map(function(service) {
return Q.fcall(command, service);
});
return Q.all(promises);
};
module.exports.ServiceDispatcher.prototype.untilSuccess = function(command, isSuccess) {
var deferred = Q.defer();
var serviceIndex = 0;
var services = this.services;
function loop() {
if (services.length <= serviceIndex) {
return Q.resolve(undefined);
}
var service = services[serviceIndex];
serviceIndex++;
Q.when(command(service), function(result) {
if (result && (!isSuccess || isSuccess(result))) {
deferred.resolve(result);
} else if (serviceIndex < services.length) {
loop();
} else {
deferred.resolve(undefined);
}
}, deferred.reject);
}
Q.nextTick(loop);
return deferred.promise;
}
|
var Q = require('q');
var util = require('util');
module.exports.ServiceDispatcher = function() {
this.services = [];
};
module.exports.ServiceDispatcher.prototype.use = function(service) {
this.services.push(service);
};
module.exports.ServiceDispatcher.prototype.forAll = function(command) {
var promises = this.services.map(function(service) {
return Q.fcall(command, service);
});
return Q.all(promises);
};
module.exports.ServiceDispatcher.prototype.untilSuccess = function(command, isSuccess) {
var deferred = Q.defer();
var serviceIndex = 0;
var services = this.services;
function loop() {
if (services.length <= serviceIndex) {
return Q.resolve(undefined);
}
var service = services[serviceIndex];
serviceIndex++;
util.log('Trying service ' + service.name);
Q.when(command(service), function(result) {
if (result && (!isSuccess || isSuccess(result))) {
deferred.resolve(result);
} else if (serviceIndex < services.length) {
loop();
} else {
deferred.resolve(undefined);
}
}, deferred.reject);
}
Q.nextTick(loop);
return deferred.promise;
}
|
Add header to dev launch script
|
#!/usr/bin/env python
"""
# # #### ##### # # ##### # # # #
# # # # # ## ## # # # ## # #
### #### #### # # # # # # # # #####
# # # # # # # # ## # # #
# # # ##### # # # # # # # #
Kremlin Magical Everything System
Glasnost Image Board and Boredom Inhibitor
"""
from kremlin import app
def main():
print "Launching kremlin in development mode."
print "--------------------------------------"
app.run(debug=True)
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
"""
# # #### ##### # # ##### # # # #
# # # # # ## ## # # # ## # #
### #### #### # # # # # # # # #####
# # # # # # # # ## # # #
# # # ##### # # # # # # # #
Kremlin Magical Everything System
Glasnost Image Board and Boredom Inhibitor
"""
from kremlin import app
app.run(debug=True)
|
Sort cache channels and keys alphabetically
|
var crypto = require('crypto');
function DatabaseTables(tables) {
this.namespace = 't';
this.tables = tables;
}
module.exports = DatabaseTables;
DatabaseTables.prototype.key = function() {
return this.tables.map(function(table) {
return this.namespace + ':' + shortHashKey(table.dbname + ':' + table.table_name + '.' + table.schema_name);
}.bind(this)).sort();
};
DatabaseTables.prototype.getCacheChannel = function() {
var key = this.tables.map(function(table) {
return table.dbname + ':' + table.schema_name + "." + table.table_name;
}).sort().join(";;");
return key;
};
function shortHashKey(target) {
return crypto.createHash('sha256').update(target).digest('base64').substring(0,6);
}
|
var crypto = require('crypto');
function DatabaseTables(tables) {
this.namespace = 't';
this.tables = tables;
}
module.exports = DatabaseTables;
DatabaseTables.prototype.key = function() {
return this.tables.map(function(table) {
return this.namespace + ':' + shortHashKey(table.dbname + ':' + table.table_name + '.' + table.schema_name);
}.bind(this));
};
DatabaseTables.prototype.getCacheChannel = function() {
var key = this.tables.map(function(table) {
return table.dbname + ':' + table.schema_name + "." + table.table_name;
}).join(";;");
return key;
};
function shortHashKey(target) {
return crypto.createHash('sha256').update(target).digest('base64').substring(0,6);
}
|
Handle favicon, HTTP and HTTPS
|
const {send} = require('micro');
const fetch = require('node-fetch');
const assert = require('http-assert');
module.exports = async (req, res) => {
let url = req.url.substr(1);
if (url === 'favicon.ico') return;
if (url.indexOf('http://') !== -1) {
url = url.substr(7);
} else if (url.indexOf('https://') !== -1) {
url = url.substr(8);
}
assert(url !== '', 400, 'URL must be defined. Usage: https://up.now.sh/google.com');
let statusCode;
let message;
try {
const response = await fetch(`http://${url}`, {
timeout: 5000
});
statusCode = response.status;
message = `${url} is up.`;
} catch (err) {
const {type, code} = err;
if (type === 'system' && code === 'ENOTFOUND') {
statusCode = 200;
message = `${url} is not up.`;
} else {
statusCode = 400;
message = `Something went wrong.`;
}
}
send(res, statusCode, message);
};
|
const {send} = require('micro');
const fetch = require('node-fetch');
const assert = require('http-assert');
module.exports = async (req, res) => {
const url = req.url.substr(1);
assert(url !== '', 400, 'URL must be defined.');
let statusCode;
let message;
try {
const response = await fetch(`http://${url}`, {
timeout: 5000
});
statusCode = response.status;
message = `${url} is up.`;
} catch (err) {
const {type, code} = err;
if (type === 'system' && code === 'ENOTFOUND') {
statusCode = 200;
message = `${url} is not up.`;
} else {
statusCode = 400;
message = `Something went wrong.`;
}
}
send(res, statusCode, message);
};
|
Allow passing initialization kwargs to PDFDocument through pdf_response
|
from datetime import date
import re
from django.db.models import Max, Min
from django.http import HttpResponse
from pdfdocument.document import PDFDocument
def worklog_period(obj):
activity_period = obj.worklogentries.aggregate(Max('date'), Min('date'))
article_period = obj.articleentries.aggregate(Max('date'), Min('date'))
min_date = date(1900, 1, 1)
max_date = date(3000, 1, 1)
if not (activity_period['date__min'] or article_period['date__min']):
return (min_date, max_date)
start = min(activity_period['date__min'] or max_date, article_period['date__min'] or max_date)
end = max(activity_period['date__max'] or min_date, article_period['date__max'] or min_date)
return (start, end)
def worklog_period_string(obj):
start, end = obj.worklog_period()
return u'%s - %s' % (start.strftime('%d.%m.%Y'), end.strftime('%d.%m.%Y'))
FILENAME_RE = re.compile(r'[^A-Za-z0-9\-\.]+')
def pdf_response(filename, **kwargs):
response = HttpResponse(mimetype='application/pdf')
response['Content-Disposition'] = 'attachment; filename=%s.pdf' %\
FILENAME_RE.sub('-', filename)
return PDFDocument(response, **kwargs), response
|
from datetime import date
import re
from django.db.models import Max, Min
from django.http import HttpResponse
from pdfdocument.document import PDFDocument
def worklog_period(obj):
activity_period = obj.worklogentries.aggregate(Max('date'), Min('date'))
article_period = obj.articleentries.aggregate(Max('date'), Min('date'))
min_date = date(1900, 1, 1)
max_date = date(3000, 1, 1)
if not (activity_period['date__min'] or article_period['date__min']):
return (min_date, max_date)
start = min(activity_period['date__min'] or max_date, article_period['date__min'] or max_date)
end = max(activity_period['date__max'] or min_date, article_period['date__max'] or min_date)
return (start, end)
def worklog_period_string(obj):
start, end = obj.worklog_period()
return u'%s - %s' % (start.strftime('%d.%m.%Y'), end.strftime('%d.%m.%Y'))
FILENAME_RE = re.compile(r'[^A-Za-z0-9\-\.]+')
def pdf_response(filename):
response = HttpResponse(mimetype='application/pdf')
response['Content-Disposition'] = 'attachment; filename=%s.pdf' %\
FILENAME_RE.sub('-', filename)
return PDFDocument(response), response
|
Move constant field to class constructor
|
<?php
/**
* Plugin Name: Hackerspace
*
* see http://spaceapi.net/documentation#documentation-tab-13
*/
class SpaceApi
{
const API_VERSION = '0.13';
public function __construct()
{
$this->spaceapi = new stdClass; // TODO create an external regular class ?
$this->spaceapi->api = self::API_VERSION;
}
// save the space api settings
public function save_spaceapi_settings()
{
//$spaceapi = $this->spaceapi;
}
// get space api infos from the settings
public function get_spaceapi_settings()
{
$spaceapi = $this->spaceapi;
$spaceapi->state->open = null;
return $spaceapi;
}
//render json encoded Space Api infos
public function spaceapi_json()
{
header('Access-Control-Allow-Origin: *;');
header('Content-Type: application/json; charset='.get_option('blog_charset'));
header('Cache-Control: no-cache;');
$spaceapi = $this->get_spaceapi_settings();
echo json_encode($spaceapi);
}
}
|
<?php
/**
* Plugin Name: Hackerspace
*
* see http://spaceapi.net/documentation#documentation-tab-13
*/
class SpaceApi
{
const API_VERSION = '0.13';
// get space api infos from the settings
public function get_spaceapi_settings()
{
$spaceapi = new stdClass;
$spaceapi->api = self::API_VERSION;
$spaceapi->state->open = null;
return $spaceapi;
}
//render json encoded Space Api infos
public function spaceapi_json()
{
header('Access-Control-Allow-Origin: *;');
header('Content-Type: application/json; charset='.get_option('blog_charset'));
header('Cache-Control: no-cache;');
$spaceapi = $this->get_spaceapi_settings();
echo json_encode($spaceapi);
}
}
|
Update maxBuffer limit to 100Mib
|
#!/usr/bin/env node
/*
*
* Copyright 2015 gRPC authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
/**
* This file is required because package.json cannot reference a file that
* is not distributed with the package, and we use node-pre-gyp to distribute
* the plugin binary
*/
'use strict';
var path = require('path');
var execFile = require('child_process').execFile;
var exe_ext = process.platform === 'win32' ? '.exe' : '';
var plugin = path.resolve(__dirname, 'grpc_node_plugin' + exe_ext);
var child_process = execFile(plugin, process.argv.slice(2), {encoding: 'buffer', maxBuffer: 1024 * 1024 * 100}, function(error, stdout, stderr) {
if (error) {
throw error;
}
});
process.stdin.pipe(child_process.stdin);
child_process.stdout.pipe(process.stdout);
child_process.stderr.pipe(process.stderr);
|
#!/usr/bin/env node
/*
*
* Copyright 2015 gRPC authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
/**
* This file is required because package.json cannot reference a file that
* is not distributed with the package, and we use node-pre-gyp to distribute
* the plugin binary
*/
'use strict';
var path = require('path');
var execFile = require('child_process').execFile;
var exe_ext = process.platform === 'win32' ? '.exe' : '';
var plugin = path.resolve(__dirname, 'grpc_node_plugin' + exe_ext);
var child_process = execFile(plugin, process.argv.slice(2), {encoding: 'buffer', maxBuffer: 1024 * 1000}, function(error, stdout, stderr) {
if (error) {
throw error;
}
});
process.stdin.pipe(child_process.stdin);
child_process.stdout.pipe(process.stdout);
child_process.stderr.pipe(process.stderr);
|
Revert "use mktmp in driver tests"
This reverts commit 55c3796beadbf427b844972b68fb7c2078fea0ac.
|
// mongodb-driver-test.js
//
// Testing the mongodb driver
//
// Copyright 2012, StatusNet Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
var assert = require('assert'),
vows = require('vows'),
databank = require('databank'),
Databank = databank.Databank,
MongoDatabank = require('../lib/index');
Databank.register('mongodb', MongoDatabank);
var params = {host: 'localhost',
port: 27017,
dbname: 'test',
checkSchema: true};
var suite = databank.DriverTest('mongodb', params);
suite['export'](module);
|
// mongodb-driver-test.js
//
// Testing the mongodb driver
//
// Copyright 2012, StatusNet Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
var assert = require('assert'),
vows = require('vows'),
databank = require('databank'),
Databank = databank.Databank,
MongoDatabank = require('../lib/index');
Databank.register('mongodb', MongoDatabank);
var params = {host: 'localhost',
port: 27017,
mktmp: true,
checkSchema: true};
var suite = databank.DriverTest('mongodb', params);
suite['export'](module);
|
Fix gulp-istanbul 0.5.0 doesn't generate lcov file.
|
var gulp = require('gulp');
var $ = require('gulp-load-plugins')();
var rirmaf = require('rimraf');
var lib = 'lib/**/*.js';
gulp.task('coverage', function(){
return gulp.src(lib)
.pipe($.istanbul())
.pipe($.istanbul.hookRequire());
});
gulp.task('coverage:clean', function(callback){
rirmaf('coverage', callback);
});
gulp.task('mocha', ['coverage'], function(){
return gulp.src('test/index.js')
.pipe($.mocha({
reporter: 'spec'
}))
.pipe($.istanbul.writeReports());
});
gulp.task('jshint', function(){
return gulp.src(lib)
.pipe($.jshint())
.pipe($.jshint.reporter('jshint-stylish'))
.pipe($.jshint.reporter('fail'));
});
gulp.task('watch', function(){
gulp.watch(lib, ['mocha', 'jshint']);
gulp.watch(['test/index.js'], ['mocha']);
});
gulp.task('test', ['mocha', 'jshint']);
|
'use strict';
var gulp = require('gulp');
var $ = require('gulp-load-plugins')();
var rirmaf = require('rimraf');
var lib = 'lib/**/*.js';
gulp.task('coverage', function(){
return gulp.src(lib)
.pipe($.istanbul());
});
gulp.task('coverage:clean', function(callback){
rirmaf('coverage', callback);
});
gulp.task('mocha', ['coverage'], function(){
return gulp.src('test/index.js')
.pipe($.mocha({
reporter: 'spec'
}))
.pipe($.istanbul.writeReports());
});
gulp.task('jshint', function(){
return gulp.src(lib)
.pipe($.jshint())
.pipe($.jshint.reporter('jshint-stylish'))
.pipe($.jshint.reporter('fail'));
});
gulp.task('watch', function(){
gulp.watch(lib, ['mocha', 'jshint']);
gulp.watch(['test/index.js'], ['mocha']);
});
gulp.task('test', ['mocha', 'jshint']);
|
Add TODO, sign bytes may be words
|
package me.hugmanrique.pokedata.sprites.signs;
import me.hugmanrique.pokedata.sprites.Sprite;
import me.hugmanrique.pokedata.utils.ROM;
/**
* @author Hugmanrique
* @since 02/05/2017
*/
public class SpriteSign extends Sprite {
// TODO Check if some of these bytes are words values
private byte b2;
private byte b4;
private byte b5;
private byte b6;
private byte b7;
private byte b8;
private long scriptPtr;
public SpriteSign(ROM rom) {
x = rom.readByte();
b2 = rom.readByte();
y = rom.readByte();
b4 = rom.readByte();
b5 = rom.readByte();
b6 = rom.readByte();
b7 = rom.readByte();
b8 = rom.readByte();
scriptPtr = rom.getPointer();
}
@Override
public int getSize() {
return 12;
}
}
|
package me.hugmanrique.pokedata.sprites.signs;
import me.hugmanrique.pokedata.sprites.Sprite;
import me.hugmanrique.pokedata.utils.ROM;
/**
* @author Hugmanrique
* @since 02/05/2017
*/
public class SpriteSign extends Sprite {
private byte b2;
private byte b4;
private byte b5;
private byte b6;
private byte b7;
private byte b8;
private long scriptPtr;
public SpriteSign(ROM rom) {
x = rom.readByte();
b2 = rom.readByte();
y = rom.readByte();
b4 = rom.readByte();
b5 = rom.readByte();
b6 = rom.readByte();
b7 = rom.readByte();
b8 = rom.readByte();
scriptPtr = rom.getPointer();
}
@Override
public int getSize() {
return 12;
}
}
|
Add overload method for easier immediate friend accepting in dev
|
package GameNationBackEnd.Documents;
import com.sun.org.apache.xpath.internal.operations.Bool;
import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.mapping.DBRef;
import org.springframework.data.mongodb.core.mapping.Document;
/**
* Created by tijs on 30/11/2016.
*/
@Document
public class Friend {
@Id
private String id;
@DBRef
private User sender;
@DBRef
private User receiver;
private Boolean accepted;
private Boolean sended;
public Friend(){
sender = new User();
receiver = new User();
}
public Friend(User sender, User receiver) {
this(sender, receiver, false);
}
public Friend(User sender, User receiver, boolean accepted){
this.sender = sender;
this.receiver = receiver;
this.sended = true;
this.accepted = accepted;
}
public User getSender() {
return sender;
}
public User getReceiver() {
return receiver;
}
public String getId() {
return id;
}
public Boolean isAccepted(){ return accepted; }
public Boolean getSended(){ return sended; }
public void setAccepted(Boolean a){ this.accepted = a; }
public void setSended(Boolean s){ this.sended = s; }
}
|
package GameNationBackEnd.Documents;
import com.sun.org.apache.xpath.internal.operations.Bool;
import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.mapping.DBRef;
import org.springframework.data.mongodb.core.mapping.Document;
/**
* Created by tijs on 30/11/2016.
*/
@Document
public class Friend {
@Id
private String id;
@DBRef
private User sender;
@DBRef
private User receiver;
private Boolean accepted;
private Boolean sended;
public Friend(){
sender = new User();
receiver = new User();
}
public Friend(User sender, User receiver){
this.sender = sender;
this.receiver = receiver;
this.sended = true;
}
public User getSender() {
return sender;
}
public User getReceiver() {
return receiver;
}
public String getId() {
return id;
}
public Boolean isAccepted(){ return accepted; }
public Boolean getSended(){ return sended; }
public void setAccepted(Boolean a){ this.accepted = a; }
public void setSended(Boolean s){ this.sended = s; }
}
|
Fix path to pybossa tests
|
import json
import random
from mock import patch
from pybossa.model.task import Task
from pybossa.model.project import Project
from pybossa.model.user import User
from pybossa.model.task_run import TaskRun
from pybossa.model.category import Category
import pybossa
import sys
import os
sys.path.append(os.path.abspath("./pybossa/test"))
from helper import sched
from default import Test, db, with_context
class TestSched(sched.Helper):
def setUp(self):
super(TestSched, self).setUp()
self.endpoints = ['project', 'task', 'taskrun']
@with_context
def test_get_random_task(self):
self._test_get_random_task()
def _test_get_random_task(self, user=None):
task = pybossa.sched.get_random_task(project_id=1)
assert task is not None, task
tasks = db.session.query(Task).all()
for t in tasks:
db.session.delete(t)
db.session.commit()
task = pybossa.sched.get_random_task(project_id=1)
assert task is None, task
|
import json
import random
from mock import patch
from pybossa.model.task import Task
from pybossa.model.project import Project
from pybossa.model.user import User
from pybossa.model.task_run import TaskRun
from pybossa.model.category import Category
import pybossa
import sys
import os
sys.path.append(os.path.abspath("../pybossa/test"))
from helper import sched
from default import Test, db, with_context
class TestSched(sched.Helper):
def setUp(self):
super(TestSched, self).setUp()
self.endpoints = ['project', 'task', 'taskrun']
@with_context
def test_get_random_task(self):
self._test_get_random_task()
def _test_get_random_task(self, user=None):
task = pybossa.sched.get_random_task(project_id=1)
assert task is not None, task
tasks = db.session.query(Task).all()
for t in tasks:
db.session.delete(t)
db.session.commit()
task = pybossa.sched.get_random_task(project_id=1)
assert task is None, task
|
Allow to avoid translation of classname
|
package ru.ydn.wicket.wicketorientdb.utils;
import java.util.List;
import org.apache.wicket.markup.html.form.IChoiceRenderer;
import org.apache.wicket.model.IModel;
import ru.ydn.wicket.wicketorientdb.OrientDbWebSession;
import ru.ydn.wicket.wicketorientdb.model.OClassNamingModel;
import com.orientechnologies.orient.core.metadata.schema.OClass;
/**
* {@link IChoiceRenderer} for {@link OClass}es
*/
public class OClassChoiceRenderer implements IChoiceRenderer<OClass> {
public static final OClassChoiceRenderer INSTANCE = new OClassChoiceRenderer(true);
public static final OClassChoiceRenderer INSTANCE_NO_LOCALIZATION = new OClassChoiceRenderer(false);
private static final long serialVersionUID = 1L;
private boolean localize;
public OClassChoiceRenderer(boolean localize) {
this.localize = localize;
}
@Override
public Object getDisplayValue(OClass object) {
return localize?new OClassNamingModel(object).getObject():object.getName();
}
@Override
public String getIdValue(OClass object, int index) {
return object.getName();
}
@Override
public OClass getObject(String id,
IModel<? extends List<? extends OClass>> choicesModel) {
OClass ret = OrientDbWebSession.get().getDatabase().getMetadata().getSchema().getClass(id);
List<? extends OClass> choices = choicesModel.getObject();
return choices!=null && choices.contains(ret) ? ret : null;
}
}
|
package ru.ydn.wicket.wicketorientdb.utils;
import java.util.List;
import org.apache.wicket.markup.html.form.IChoiceRenderer;
import org.apache.wicket.model.IModel;
import ru.ydn.wicket.wicketorientdb.OrientDbWebSession;
import ru.ydn.wicket.wicketorientdb.model.OClassNamingModel;
import com.orientechnologies.orient.core.metadata.schema.OClass;
/**
* {@link IChoiceRenderer} for {@link OClass}es
*/
public class OClassChoiceRenderer implements IChoiceRenderer<OClass> {
public static final OClassChoiceRenderer INSTANCE = new OClassChoiceRenderer();
private static final long serialVersionUID = 1L;
@Override
public Object getDisplayValue(OClass object) {
return new OClassNamingModel(object).getObject();
}
@Override
public String getIdValue(OClass object, int index) {
return object.getName();
}
@Override
public OClass getObject(String id,
IModel<? extends List<? extends OClass>> choicesModel) {
OClass ret = OrientDbWebSession.get().getDatabase().getMetadata().getSchema().getClass(id);
List<? extends OClass> choices = choicesModel.getObject();
return choices!=null && choices.contains(ret) ? ret : null;
}
}
|
Add a build version to executables
Use -v to print version info.
|
package main
import (
"flag"
"github.com/litl/galaxy/log"
)
var (
// Location of the default config.
// This will not be overwritten by shuttle.
defaultConfig string
// Location of the live config which is updated on every state change.
// The default config is loaded if this file does not exist.
stateConfig string
// Listen address for the http server.
listenAddr string
// Debug logging
debug bool
// version flags
version bool
buildVersion string
)
func init() {
flag.StringVar(&listenAddr, "http", "127.0.0.1:9090", "http server address")
flag.StringVar(&defaultConfig, "config", "", "default config file")
flag.StringVar(&stateConfig, "state", "", "updated config which reflects the internal state")
flag.BoolVar(&debug, "debug", false, "verbose logging")
flag.BoolVar(&version, "v", false, "display version")
flag.Parse()
}
func main() {
if debug {
log.DefaultLogger.Level = log.DEBUG
}
if version {
println(buildVersion)
return
}
loadConfig()
startHTTPServer()
}
|
package main
import (
"flag"
"github.com/litl/galaxy/log"
)
var (
// Location of the default config.
// This will not be overwritten by shuttle.
defaultConfig string
// Location of the live config which is updated on every state change.
// The default config is loaded if this file does not exist.
stateConfig string
// Listen address for the http server.
listenAddr string
// Debug logging
debug bool
)
func init() {
flag.StringVar(&listenAddr, "http", "127.0.0.1:9090", "http server address")
flag.StringVar(&defaultConfig, "config", "", "default config file")
flag.StringVar(&stateConfig, "state", "", "updated config which reflects the internal state")
flag.BoolVar(&debug, "debug", false, "verbose logging")
flag.Parse()
}
func main() {
if debug {
log.DefaultLogger.Level = log.DEBUG
}
loadConfig()
startHTTPServer()
}
|
Load next page shortly before hitting the bottom
This improves the user experience by loading in the next entries shortly before him getting to the bottom. It makes the scrolling more smooth without a break in between.
It also fixes an error on my browser that scrolling never hits the defined number. When I debugged it I hit `.scrolltop` of 1092.5 and the `doc.height - win.height` of 1093, so the condition was never true.
|
$(document).ready(function() {
var win = $(window);
win.scroll(function() {
if ($(document).height() - win.height() - win.scrollTop() < 150) {
var formData = $('#pagination form:last').serialize();
if (formData) {
$('#pagination').html('<div class="loading-spinner"></div>');
$.post('./', formData, function (data) {
var body = $(data);
$('#pagination').remove();
$('#main_results').append('<hr/>');
$('#main_results').append(body.find('.result'));
$('#main_results').append(body.find('#pagination'));
});
}
}
});
});
|
$(document).ready(function() {
var win = $(window);
win.scroll(function() {
if ($(document).height() - win.height() == win.scrollTop()) {
var formData = $('#pagination form:last').serialize();
if (formData) {
$('#pagination').html('<div class="loading-spinner"></div>');
$.post('./', formData, function (data) {
var body = $(data);
$('#pagination').remove();
$('#main_results').append('<hr/>');
$('#main_results').append(body.find('.result'));
$('#main_results').append(body.find('#pagination'));
});
}
}
});
});
|
Create SiteMeta for default site
|
"""
Business logic in this app is implemented using a CQRS style. Commands should
be implemented as functions here. Queries should be implemented as methods on
Django model managers. Commands can then be called from a management command
(i.e. the CLI), a view, a signal, etc.
"""
from django.conf import settings
from django.contrib.sites.models import Site
from webquills.core.models import SiteMeta
def initialize_site():
"""
For development environments, set up the Site and home page objects.
"""
try:
site = Site.objects.get(id=settings.SITE_ID)
# If the default site was already created, just update its properties
site.domain = "webquills.com"
site.name = "WebQuills"
except Site.DoesNotExist:
site = Site.objects.create(
id=settings.SITE_ID,
domain="webquills.com",
name="WebQuills",
)
if not hasattr(site, "meta"):
SiteMeta.objects.create(site=site)
site.save()
|
"""
Business logic in this app is implemented using a CQRS style. Commands should
be implemented as functions here. Queries should be implemented as methods on
Django model managers. Commands can then be called from a management command
(i.e. the CLI), a view, a signal, etc.
"""
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django.contrib.sites.models import Site
def initialize_site():
"""
For development environments, set up the Site and home page objects.
"""
try:
site = Site.objects.get(id=settings.SITE_ID)
# If the default site was already created, just update its properties
site.domain = "webquills.com"
site.name = "WebQuills"
except Site.DoesNotExist:
site = Site.objects.create(
id=settings.SITE_ID,
domain="webquills.com",
name="WebQuills",
)
site.save()
|
Change version to 2.6 as feature was added
|
#!/usr/bin/env python
from setuptools import setup
import os
# Utility function to read README file
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='iss',
version='2.6',
description="Ideally Single Source app for MemberSuite data.",
author='AASHE',
author_email='it@aashe.org',
url='https://github.com/aashe/iss',
long_description=read("README.md"),
packages=[
'iss',
],
include_package_data=True,
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Framework :: Django',
],
install_requires=[
"beatbox==32.1",
"membersuite_api_client==0.4.3",
"pycountry",
"pyYAML==3.12",
]
)
|
#!/usr/bin/env python
from setuptools import setup
import os
# Utility function to read README file
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='iss',
version='2.5.10',
description="Ideally Single Source app for MemberSuite data.",
author='AASHE',
author_email='it@aashe.org',
url='https://github.com/aashe/iss',
long_description=read("README.md"),
packages=[
'iss',
],
include_package_data=True,
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Framework :: Django',
],
install_requires=[
"beatbox==32.1",
"membersuite_api_client==0.4.3",
"pycountry",
"pyYAML==3.12",
]
)
|
Fix bench decorator to return a dict instead of a list
|
from functools import wraps
from inspect import getcallargs
from timer import Timer
def bench(f):
"""Times a function given specific arguments."""
timer = Timer(tick_now=False)
@wraps(f)
def wrapped(*args, **kwargs):
timer.start()
f(*args, **kwargs)
timer.stop()
res = {call_signature(f, *args, **kwargs): timer.get_times()['real']} # TODO penser a quel temps garder
return res
return wrapped
def call_signature(f, *args, **kwargs):
"""Return a string representation of a function call."""
call_args = getcallargs(f, *args, **kwargs)
return ';'.join(["%s=%s" % (k, v) for k, v in call_args.items()])
@bench
def lala(a, b, c="default c", d="default d"):
print("lala est appelee")
if __name__ == '__main__':
print(lala("cest a", "cest b", d="change d"))
|
from functools import wraps
from inspect import getcallargs
from timer import Timer
def bench(f):
"""Times a function given specific arguments."""
timer = Timer(tick_now=False)
@wraps(f)
def wrapped(*args, **kwargs):
timer.start()
f(*args, **kwargs)
timer.stop()
res = [call_signature(f, *args, **kwargs),
timer.get_times()['real']] # TODO penser a quel temps garder
return res
return wrapped
def call_signature(f, *args, **kwargs):
"""Return a string representation of a function call"""
call_args = getcallargs(f, *args, **kwargs)
return ';'.join(["%s=%s" % (k, v) for k, v in call_args.items()])
@bench
def lala(a, b, c="default c", d="default d"):
print("lala est appelee")
if __name__ == '__main__':
print(lala("cest a", "cest b", d="change d"))
|
Use reduce to avoid mutating the object unneededly.
|
'use strict';
const externalDeps = [
'@angular/*',
'angular',
'angular-mocks',
'rxjs/*',
'lodash',
'moment',
'moment-timezone',
];
const map = {
'@angular': 'node_modules/@angular',
'angular': 'node_modules/angular/angular',
'angular-mocks': 'node_modules/angular-mocks/angular-mocks',
'rxjs': 'node_modules/rxjs',
'lodash': 'node_modules/lodash/index',
'node-uuid': 'node_modules/node-uuid/uuid',
'moment': 'node_modules/moment/moment',
'moment-timezone': 'node_modules/moment-timezone/builds/moment-timezone-with-data.min',
};
const meta = externalDeps.reduce((curMeta, dep) => {
curMeta[dep] = { build: false };
return curMeta;
}, {});
System.config({
meta,
map,
paths: {
'*': '*.js',
},
});
|
'use strict';
const externalDeps = [
'@angular/*',
'angular',
'angular-mocks',
'rxjs/*',
'lodash',
'moment',
'moment-timezone',
];
const map = {
'@angular': 'node_modules/@angular',
'angular': 'node_modules/angular/angular',
'angular-mocks': 'node_modules/angular-mocks/angular-mocks',
'rxjs': 'node_modules/rxjs',
'lodash': 'node_modules/lodash/index',
'node-uuid': 'node_modules/node-uuid/uuid',
'moment': 'node_modules/moment/moment',
'moment-timezone': 'node_modules/moment-timezone/builds/moment-timezone-with-data.min',
}
let meta = {};
externalDeps.forEach(dep => {
meta[dep] = { build: false };
});
System.config({
meta,
map,
paths: {
'*': '*.js',
},
});
|
[Storybook] Remove header of addon-info temporary
Since the inline styling is damn strange now, header and preview parts are split into separate shadowed containers.
Issue: storybooks/storybook#1877
|
import { configure, setAddon } from '@storybook/react';
import { setOptions } from '@storybook/addon-options';
import infoAddon, { setDefaults } from '@storybook/addon-info';
import propsTableAddon from './propsTable-addon';
import Code from './Code';
// -------------------------------------
// Addons
// -------------------------------------
setOptions({
name: 'iCHEF gypcrete',
url: 'https://github.com/iCHEF/gypcrete',
showDownPanel: true,
});
setDefaults({
inline: true,
propTables: false,
/**
* Remove the header temporary.
*
* Since the inline styling is damn strange now,
* header and preview parts are split into separate shadowed containers.
*
* @issue https://github.com/storybooks/storybook/issues/1877
*
* #FIXME: wait for storybooks/storybook#1501
*/
header: false,
/**
* Fix <Code> styling
*
* #FIXME: wait for storybooks/storybook#1501
*/
marksyConf: { code: Code }
});
setAddon(infoAddon);
setAddon(propsTableAddon);
// -------------------------------------
// Load Stories
// -------------------------------------
const reqContext = require.context(
'../examples/',
true,
/index\.js$/
);
function loadStories() {
reqContext.keys().forEach(reqContext);
}
configure(loadStories, module);
|
import { configure, setAddon } from '@storybook/react';
import { setOptions } from '@storybook/addon-options';
import infoAddon, { setDefaults } from '@storybook/addon-info';
import propsTableAddon from './propsTable-addon';
import Code from './Code';
// -------------------------------------
// Addons
// -------------------------------------
setOptions({
name: 'iCHEF gypcrete',
url: 'https://github.com/iCHEF/gypcrete',
showDownPanel: true,
});
setDefaults({
inline: true,
propTables: false,
/**
* Fix <Code> styling
*
* #FIXME: wait for storybooks/storybook#1501
*/
marksyConf: { code: Code }
});
setAddon(infoAddon);
setAddon(propsTableAddon);
// -------------------------------------
// Load Stories
// -------------------------------------
const reqContext = require.context(
'../examples/',
true,
/index\.js$/
);
function loadStories() {
reqContext.keys().forEach(reqContext);
}
configure(loadStories, module);
|
Remove unused statements from API controller
|
<?php
namespace AppBundle\Controller;
use Symfony\Bundle\FrameworkBundle\Controller\Controller;
use Symfony\Component\HttpFoundation\Request;
use AppBundle\API\Listing;
use Symfony\Component\Routing\Annotation\Route;
class APIController extends Controller
{
/**
* @Route("/api/listing/organisms", name="api_listing_organisms", options={"expose"=true})
*/
public function listingOrganismsAction(Request $request){
$organisms = $this->container->get(Listing\Organisms::class);
$response = $this->json($organisms->execute($request->query->get('limit'), "%".$request->query->get('search')."%"));
$response->headers->set('Access-Control-Allow-Origin', '*');
return $response;
}
private function getFennecUser(){
$user = null;
if($this->get('security.authorization_checker')->isGranted('IS_AUTHENTICATED_FULLY')){
$user = $this->get('security.token_storage')->getToken()->getUser();
}
return $user;
}
}
|
<?php
namespace AppBundle\Controller;
use Symfony\Bundle\FrameworkBundle\Controller\Controller;
use Symfony\Component\Config\Definition\Exception\Exception;
use Symfony\Component\HttpFoundation\ParameterBag;
use Symfony\Component\HttpFoundation\Request;
use AppBundle\API\Listing;
use Symfony\Component\Routing\Annotation\Route;
class APIController extends Controller
{
/**
* @Route("/api/listing/organisms", name="api_listing_organisms", options={"expose"=true})
*/
public function listingOrganismsAction(Request $request){
$organisms = $this->container->get(Listing\Organisms::class);
$response = $this->json($organisms->execute($request->query->get('limit'), "%".$request->query->get('search')."%"));
$response->headers->set('Access-Control-Allow-Origin', '*');
return $response;
}
private function getFennecUser(){
$user = null;
if($this->get('security.authorization_checker')->isGranted('IS_AUTHENTICATED_FULLY')){
$user = $this->get('security.token_storage')->getToken()->getUser();
}
return $user;
}
}
|
Update required properties for RCTCircleView
On React Native 0.19 these added properties were required.
|
var { requireNativeComponent, PropTypes } = require('react-native');
var iface = {
name: 'CircleView',
propTypes: {
showTextWhileSpinning: PropTypes.bool,
autoTextColor: PropTypes.bool,
autoTextSize: PropTypes.bool,
showUnit: PropTypes.bool,
сontourColor: PropTypes.string,
barColor: PropTypes.array,
rimColor: PropTypes.string,
value: PropTypes.number,
maxValue: PropTypes.number,
valueAnimated: PropTypes.number,
contourSize: PropTypes.number,
barWidth: PropTypes.number,
rimWidth: PropTypes.number,
unitSize: PropTypes.number,
textSize: PropTypes.number,
renderToHardwareTextureAndroid: PropTypes.bool,
onLayout: PropTypes.bool,
importantForAccessibility: PropTypes.string,
accessibilityLabel: PropTypes.string,
accessibilityLiveRegion: PropTypes.string,
accessibilityComponentType: PropTypes.string,
testID: PropTypes.string
}
};
module.exports = requireNativeComponent('RCTCircleView', iface);
|
var { requireNativeComponent, PropTypes } = require('react-native');
var iface = {
name: 'CircleView',
propTypes: {
showTextWhileSpinning: PropTypes.bool,
autoTextColor: PropTypes.bool,
autoTextSize: PropTypes.bool,
showUnit: PropTypes.bool,
сontourColor: PropTypes.string,
barColor: PropTypes.array,
rimColor: PropTypes.string,
value: PropTypes.number,
maxValue: PropTypes.number,
valueAnimated: PropTypes.number,
contourSize: PropTypes.number,
barWidth: PropTypes.number,
rimWidth: PropTypes.number,
unitSize: PropTypes.number,
textSize: PropTypes.number,
},
};
module.exports = requireNativeComponent('RCTCircleView', iface);
|
Fix "UnboundLocalError: local variable 'dep' referenced before assignment"
|
from django import template
from django.template.defaultfilters import stringfilter
from django.utils.safestring import mark_safe
from dapi import models
register = template.Library()
@register.filter(needs_autoescape=True)
@stringfilter
def deplink(value, autoescape=None):
'''Add links for required daps'''
usedmark = ''
for mark in '< > ='.split():
split = value.split(mark)
if len(split) > 1:
usedmark = mark
break
dep = ''
if usedmark:
dap = split[0]
else:
dap = value
dep = dep.strip()
try:
m = models.MetaDap.objects.get(package_name=dap)
link = '<a href="' + m.get_human_link() + '">' + dap + '</a>'
except models.MetaDap.DoesNotExist:
link = '<abbr title="This dap is not on Dapi">' + dap + '</abbr>'
if usedmark:
link = link + usedmark + usedmark.join(split[1:])
return mark_safe(link)
|
from django import template
from django.template.defaultfilters import stringfilter
from django.utils.safestring import mark_safe
from dapi import models
register = template.Library()
@register.filter(needs_autoescape=True)
@stringfilter
def deplink(value, autoescape=None):
'''Add links for required daps'''
usedmark = ''
for mark in '< > ='.split():
split = value.split(mark)
if len(split) > 1:
usedmark = mark
break
if usedmark:
dap = split[0]
else:
dap = value
dep = dep.strip()
try:
m = models.MetaDap.objects.get(package_name=dap)
link = '<a href="' + m.get_human_link() + '">' + dap + '</a>'
except models.MetaDap.DoesNotExist:
link = '<abbr title="This dap is not on Dapi">' + dap + '</abbr>'
if usedmark:
link = link + usedmark + usedmark.join(split[1:])
return mark_safe(link)
|
Disable colours when the output is not a terminal
|
package main
import (
"os"
"github.com/agtorre/gocolorize"
"github.com/jessevdk/go-flags"
"github.com/src-d/beanstool/cli"
"golang.org/x/crypto/ssh/terminal"
)
func main() {
if !terminal.IsTerminal(int(os.Stdout.Fd())) {
gocolorize.SetPlain(true)
}
parser := flags.NewNamedParser("beanstool", flags.Default)
parser.AddCommand("stats", "print stats on all tubes", "", &cli.StatsCommand{})
parser.AddCommand("tail", "tails a tube and prints his content", "", &cli.TailCommand{})
parser.AddCommand("peek", "peeks a job from a queue", "", &cli.PeekCommand{})
parser.AddCommand("delete", "delete a job from a queue", "", &cli.DeleteCommand{})
parser.AddCommand("kick", "kicks jobs from buried back into ready", "", &cli.KickCommand{})
parser.AddCommand("put", "put a job into a tube", "", &cli.PutCommand{})
parser.AddCommand("bury", "bury existing jobs from ready state", "", &cli.BuryCommand{})
_, err := parser.Parse()
if err != nil {
if _, ok := err.(*flags.Error); ok {
parser.WriteHelp(os.Stdout)
}
os.Exit(1)
}
}
|
package main
import (
"os"
"github.com/jessevdk/go-flags"
"github.com/src-d/beanstool/cli"
)
func main() {
parser := flags.NewNamedParser("beanstool", flags.Default)
parser.AddCommand("stats", "print stats on all tubes", "", &cli.StatsCommand{})
parser.AddCommand("tail", "tails a tube and prints his content", "", &cli.TailCommand{})
parser.AddCommand("peek", "peeks a job from a queue", "", &cli.PeekCommand{})
parser.AddCommand("delete", "delete a job from a queue", "", &cli.DeleteCommand{})
parser.AddCommand("kick", "kicks jobs from buried back into ready", "", &cli.KickCommand{})
parser.AddCommand("put", "put a job into a tube", "", &cli.PutCommand{})
parser.AddCommand("bury", "bury existing jobs from ready state", "", &cli.BuryCommand{})
_, err := parser.Parse()
if err != nil {
if _, ok := err.(*flags.Error); ok {
parser.WriteHelp(os.Stdout)
}
os.Exit(1)
}
}
|
Add other options to install packages
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import weka.core.jvm as jvm
import weka.core.packages as packages
jvm.start()
# checking for installed packages
installed_packages = packages.installed_packages()
for item in installed_packages:
print item.name, item.url, "is installed\n"
# # Search for GridSearch and LibSVM, just to check package's names
# all_packages = packages.all_packages()
# for item in all_packages:
# if (item.name == "gridSearch") or (item.name == "LibSVM"):
# print(item.name + " " + item.url)
# To install gridSearch and LibSVM
# packages.install_package("gridSearch", "1.0.8")
# packages.install_package("LibSVM")
# To install MultiSearch
# packages.install_package("https://github.com/fracpete/multisearch-weka-package/releases/download/" +
# "v2016.1.30/multisearch-2016.1.30.zip")
# packages.install_package("/home/sebastian/Descargas/multisearch-2016.1.30.zip")
# packages.uninstall_package("multisearch")
jvm.stop()
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import weka.core.jvm as jvm
import weka.core.packages as packages
jvm.start()
# checking for installed packages
installed_packages = packages.installed_packages()
for item in installed_packages:
print item.name, item.url, "is installed\n"
# Search for GridSearch and LibSVM, just to check package's names
all_packages = packages.all_packages()
for item in all_packages:
if (item.name == "gridSearch") or (item.name == "LibSVM"):
print(item.name + " " + item.url)
# To install gridSearch and LibSVM
# packages.install_package("gridSearch", "1.0.8")
# packages.install_package("LibSVM")
# To install MultiSearch
# packages.install_package("https://github.com/fracpete/multisearch-weka-package/releases/download/v2014.12.10/multisearch-2014.12.10.zip")
jvm.stop()
|
Improve performance by prefixing all global functions calls with \ to
skip the look up and resolve process and go straight to the global
function
Signed-off-by: Mior Muhammad Zaki <e1a543840a942eb68427510a8a483282a7bfeddf@gmail.com>
|
<?php
namespace Orchestra\Translation;
use Illuminate\Translation\FileLoader as BaseFileLoader;
class FileLoader extends BaseFileLoader
{
/**
* {@inheritdoc}
*/
protected function loadNamespaceOverrides(array $lines, $locale, $group, $namespace)
{
$files = [
"{$this->path}/packages/{$namespace}/{$locale}/{$group}.php",
"{$this->path}/vendor/{$locale}/{$namespace}/{$group}.php",
];
foreach ($files as $file) {
$lines = $this->mergeEnvironments($lines, $file);
}
return $lines;
}
/**
* Merge the items in the given file into the items.
*
* @param array $lines
* @param string $file
*
* @return array
*/
public function mergeEnvironments(array $lines, string $file): array
{
if ($this->files->exists($file)) {
$lines = \array_replace_recursive($lines, $this->files->getRequire($file));
}
return $lines;
}
}
|
<?php
namespace Orchestra\Translation;
use Illuminate\Translation\FileLoader as BaseFileLoader;
class FileLoader extends BaseFileLoader
{
/**
* {@inheritdoc}
*/
protected function loadNamespaceOverrides(array $lines, $locale, $group, $namespace)
{
$files = [
"{$this->path}/packages/{$namespace}/{$locale}/{$group}.php",
"{$this->path}/vendor/{$locale}/{$namespace}/{$group}.php",
];
foreach ($files as $file) {
$lines = $this->mergeEnvironments($lines, $file);
}
return $lines;
}
/**
* Merge the items in the given file into the items.
*
* @param array $lines
* @param string $file
*
* @return array
*/
public function mergeEnvironments(array $lines, string $file): array
{
if ($this->files->exists($file)) {
$lines = array_replace_recursive($lines, $this->files->getRequire($file));
}
return $lines;
}
}
|
Fix definition of a global variable
|
var spawn = require('child_process').spawn,
objectAssign = require('object-assign');
module.exports = function exec(script, program) {
var dotenvConfig = objectAssign({ silent: true }, {
encoding: program.encoding,
path: program.path
});
require('dotenv').config(dotenvConfig);
var argvIndex = -1,
firstArg = program.args[0];
if (firstArg !== undefined) {
argvIndex = process.argv.indexOf(firstArg);
}
var argv = argvIndex === -1 ? [] : process.argv.splice(argvIndex + 1),
command = (typeof script === 'string' ? script : script.command) + ' ' + argv.join(' ');
script.env = script.env || {};
var env = objectAssign({}, process.env, script.env),
sh = 'sh',
shFlag = '-c';
if (process.platform === 'win32') {
sh = 'cmd';
shFlag = '/c';
command = '"' + command.trim() + '"';
}
if (!program.silent) {
console.log('to be executed:', command);
}
spawn(sh, [shFlag, command], {
env: env,
windowsVerbatimArguments: process.platform === 'win32',
stdio: 'inherit'
}).on('close', function(code) {
process.exit(code);
});
};
|
var spawn = require('child_process').spawn,
objectAssign = require('object-assign');
module.exports = function exec(script, program) {
var dotenvConfig = objectAssign({ silent: true }, {
encoding: program.encoding,
path: program.path
});
require('dotenv').config(dotenvConfig);
var argvIndex = -1;
firstArg = program.args[0];
if (firstArg !== undefined) {
argvIndex = process.argv.indexOf(firstArg);
}
var argv = argvIndex === -1 ? [] : process.argv.splice(argvIndex + 1),
command = (typeof script === 'string' ? script : script.command) + ' ' + argv.join(' ');
script.env = script.env || {};
var env = objectAssign({}, process.env, script.env),
sh = 'sh',
shFlag = '-c';
if (process.platform === 'win32') {
sh = 'cmd';
shFlag = '/c';
command = '"' + command.trim() + '"';
}
if (!program.silent) {
console.log('to be executed:', command);
}
spawn(sh, [shFlag, command], {
env: env,
windowsVerbatimArguments: process.platform === 'win32',
stdio: 'inherit'
}).on('close', function(code) {
process.exit(code);
});
};
|
Reorder TemplateSpec attributes and add to docstring.
|
# coding: utf-8
"""
Provides a class to customize template information on a per-view basis.
To customize template properties for a particular view, create that view
from a class that subclasses TemplateSpec. The "spec" in TemplateSpec
stands for "special" or "specified" template information.
"""
class TemplateSpec(object):
"""
A mixin or interface for specifying custom template information.
The "spec" in TemplateSpec can be taken to mean that the template
information is either "specified" or "special."
A view should subclass this class only if customized template loading
is needed. The following attributes allow one to customize/override
template information on a per view basis. A None value means to use
default behavior for that value and perform no customization. All
attributes are initialized to None.
Attributes:
template: the template as a string.
template_encoding: the encoding used by the template.
template_extension: the template file extension. Defaults to "mustache".
Pass False for no extension (i.e. extensionless template files).
template_name: the name of the template.
template_rel_directory: the directory containing the template file,
relative to the directory containing the module defining the class.
template_rel_path: the path to the template file, relative to the
directory containing the module defining the class.
"""
template = None
template_encoding = None
template_extension = None
template_name = None
template_rel_directory = None
template_rel_path = None
|
# coding: utf-8
"""
Provides a class to customize template information on a per-view basis.
To customize template properties for a particular view, create that view
from a class that subclasses TemplateSpec. The "Spec" in TemplateSpec
stands for template information that is "special" or "specified".
"""
# TODO: finish the class docstring.
class TemplateSpec(object):
"""
A mixin or interface for specifying custom template information.
The "spec" in TemplateSpec can be taken to mean that the template
information is either "specified" or "special."
A view should subclass this class only if customized template loading
is needed. The following attributes allow one to customize/override
template information on a per view basis. A None value means to use
default behavior for that value and perform no customization. All
attributes are initialized to None.
Attributes:
template: the template as a string.
template_rel_path: the path to the template file, relative to the
directory containing the module defining the class.
template_rel_directory: the directory containing the template file, relative
to the directory containing the module defining the class.
template_extension: the template file extension. Defaults to "mustache".
Pass False for no extension (i.e. extensionless template files).
"""
template = None
template_rel_path = None
template_rel_directory = None
template_name = None
template_extension = None
template_encoding = None
|
Use unabbreviated --quiet-git option in clone test
|
import unittest
from cvsgit.command.clone import clone
from os.path import dirname, join, exists
from shutil import rmtree
class Test(unittest.TestCase):
def setUp(self):
self.tmpdir = join(dirname(__file__), 'tmp')
def tearDown(self):
if exists(self.tmpdir):
rmtree(self.tmpdir)
def testZombieDetection(self):
# This repository has a zombie copy of a file that was actually
# moved to Attic.
cvsroot = join(dirname(__file__), 'data', 'zombie')
# TODO: Discard command output to keep the test output clean.
self.assertEquals(clone().eval('--quiet-git', cvsroot, self.tmpdir), 0)
# TODO: Verify that the correct file was picked and the zombie
# got ignored.
if __name__ == '__main__':
unittest.main()
|
import unittest
from cvsgit.command.clone import clone
from os.path import dirname, join, exists
from shutil import rmtree
class Test(unittest.TestCase):
def setUp(self):
self.tmpdir = join(dirname(__file__), 'tmp')
def tearDown(self):
if exists(self.tmpdir):
rmtree(self.tmpdir)
def testZombieDetection(self):
# This repository has a zombie copy of a file that was actually
# moved to Attic.
cvsroot = join(dirname(__file__), 'data', 'zombie')
# TODO: Discard command output to keep the test output clean.
self.assertEquals(clone().eval('--quiet', cvsroot, self.tmpdir), 0)
# TODO: Verify that the correct file was picked and the zombie
# got ignored.
if __name__ == '__main__':
unittest.main()
|
Fix duplicate volumes_from when switching consoles
|
package switchconsole
import (
"os"
log "github.com/Sirupsen/logrus"
composeConfig "github.com/docker/libcompose/config"
"github.com/docker/libcompose/project/options"
"github.com/rancher/os/compose"
"github.com/rancher/os/config"
"golang.org/x/net/context"
)
func Main() {
if len(os.Args) != 2 {
log.Fatal("Must specify exactly one existing container")
}
newConsole := os.Args[1]
cfg := config.LoadConfig()
project, err := compose.GetProject(cfg, true, false)
if err != nil {
log.Fatal(err)
}
if newConsole != "default" {
project.ServiceConfigs.Add("console", &composeConfig.ServiceConfig{})
if err = compose.LoadService(project, cfg, true, newConsole); err != nil {
log.Fatal(err)
}
}
if err = config.Set("rancher.console", newConsole); err != nil {
log.Errorf("Failed to update 'rancher.console': %v", err)
}
if err = project.Up(context.Background(), options.Up{
Log: true,
}, "console"); err != nil {
log.Fatal(err)
}
if err = project.Restart(context.Background(), 10, "docker"); err != nil {
log.Errorf("Failed to restart Docker: %v", err)
}
}
|
package switchconsole
import (
"os"
log "github.com/Sirupsen/logrus"
"github.com/docker/libcompose/project/options"
"github.com/rancher/os/compose"
"github.com/rancher/os/config"
"golang.org/x/net/context"
)
func Main() {
if len(os.Args) != 2 {
log.Fatal("Must specify exactly one existing container")
}
newConsole := os.Args[1]
cfg := config.LoadConfig()
project, err := compose.GetProject(cfg, true, false)
if err != nil {
log.Fatal(err)
}
if newConsole != "default" {
if err = compose.LoadService(project, cfg, true, newConsole); err != nil {
log.Fatal(err)
}
}
if err = config.Set("rancher.console", newConsole); err != nil {
log.Errorf("Failed to update 'rancher.console': %v", err)
}
if err = project.Up(context.Background(), options.Up{
Log: true,
}, "console"); err != nil {
log.Fatal(err)
}
if err = project.Restart(context.Background(), 10, "docker"); err != nil {
log.Errorf("Failed to restart Docker: %v", err)
}
}
|
Add test, which fails, of the gammatone filtering.
|
from __future__ import division, print_function
import pytest
import numpy as np
from pambox import auditory as aud
import scipy.io as sio
from numpy.testing import assert_allclose
def test_lowpass_filtering_of_envelope():
mat = sio.loadmat("./test_files/test_hilbert_env_and_lp_filtering_v1.mat",
squeeze_me=True)
envelope = mat['unfiltered_env']
target = mat['lp_filtered_env']
filtered_envelope = aud.lowpass_env_filtering(envelope, 150., 1, 22050.)
assert_allclose(filtered_envelope, target, atol=1e-7)
def test_erb():
bw = aud.erbbw(1000)
assert_allclose(bw, 132.63, rtol=1e-4)
# We use a different implementation than the Matlab one and the delay
# are different.
@pytest.mark.xfail
def test_gammatone_filtering():
mat = sio.loadmat('./test_files/test_gammatone_filtering.mat')
center_f = mat['midfreq'].squeeze()
fs = mat['fs'].squeeze()
signal = mat['signal'].squeeze()
targets = mat['GT_output'].squeeze()
target = targets[:,:,0].T
out = aud.gammatone_filtering(signal, center_f, fs)
assert_allclose(out, target)
|
from __future__ import division, print_function
import pytest
import numpy as np
from pambox import auditory as aud
import scipy.io as sio
from numpy.testing import assert_allclose
def test_lowpass_filtering_of_envelope():
mat = sio.loadmat("./test_files/test_hilbert_env_and_lp_filtering_v1.mat",
squeeze_me=True)
envelope = mat['unfiltered_env']
target = mat['lp_filtered_env']
filtered_envelope = aud.lowpass_env_filtering(envelope, 150., 1, 22050.)
assert_allclose(filtered_envelope, target, atol=1e-7)
def test_erb():
bw = aud.erbbw(1000)
assert_allclose(bw, 132.63, rtol=1e-4)
|
Add filters to the Mongo DB example.
|
from chatterbot import ChatBot
from chatterbot.filters import LanguageFilter, RepetitiveResponseFilter
import logging
# Uncomment the following line to enable verbose logging
# logging.basicConfig(level=logging.INFO)
# Create a new ChatBot instance
bot = ChatBot("Terminal",
storage_adapter="chatterbot.adapters.storage.MongoDatabaseAdapter",
logic_adapters=[
"chatterbot.adapters.logic.ClosestMatchAdapter"
],
filters=(
LanguageFilter,
RepetitiveResponseFilter
),
input_adapter="chatterbot.adapters.input.TerminalAdapter",
output_adapter="chatterbot.adapters.output.TerminalAdapter",
database="chatterbot-database"
)
print("Type something to begin...")
while True:
try:
bot_input = bot.get_response(None)
# Press ctrl-c or ctrl-d on the keyboard to exit
except (KeyboardInterrupt, EOFError, SystemExit):
break
|
from chatterbot import ChatBot
from chatterbot.filters import LanguageFilter
import logging
# Uncomment the following line to enable verbose logging
# logging.basicConfig(level=logging.INFO)
# Create a new ChatBot instance
bot = ChatBot("Terminal",
storage_adapter="chatterbot.adapters.storage.MongoDatabaseAdapter",
logic_adapters=[
"chatterbot.adapters.logic.ClosestMatchAdapter"
],
filters=(
LanguageFilter,
),
input_adapter="chatterbot.adapters.input.TerminalAdapter",
output_adapter="chatterbot.adapters.output.TerminalAdapter",
database="chatterbot-database"
)
print("Type something to begin...")
while True:
try:
bot_input = bot.get_response(None)
# Press ctrl-c or ctrl-d on the keyboard to exit
except (KeyboardInterrupt, EOFError, SystemExit):
break
|
Fix go version in another brats test
Co-authored-by: An Yu <446e9838deaeb979640d56c46523b7c882d598cd@pivotal.io>
|
package brats_test
import (
"github.com/cloudfoundry/libbuildpack/bratshelper"
"github.com/cloudfoundry/libbuildpack/cutlass"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
var _ = Describe("Go buildpack", func() {
bratshelper.UnbuiltBuildpack("go", CopyBrats)
bratshelper.DeployingAnAppWithAnUpdatedVersionOfTheSameBuildpack(CopyBrats)
bratshelper.StagingWithBuildpackThatSetsEOL("go", func(_ string) *cutlass.App {
return CopyBrats("1.8.7")
})
bratshelper.StagingWithADepThatIsNotTheLatest("go", func(_ string) *cutlass.App {
return CopyBrats("1.8.6")
})
bratshelper.StagingWithCustomBuildpackWithCredentialsInDependencies(CopyBrats)
bratshelper.DeployAppWithExecutableProfileScript("go", CopyBrats)
bratshelper.DeployAnAppWithSensitiveEnvironmentVariables(CopyBrats)
bratshelper.ForAllSupportedVersions("go", CopyBrats, func(goVersion string, app *cutlass.App) {
PushApp(app)
By("installs the correct go version", func() {
Expect(app.Stdout.String()).To(ContainSubstring("Installing go " + goVersion))
})
By("runs a simple webserver", func() {
Expect(app.GetBody("/")).To(ContainSubstring("Hello World!"))
})
})
})
|
package brats_test
import (
"github.com/cloudfoundry/libbuildpack/bratshelper"
"github.com/cloudfoundry/libbuildpack/cutlass"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
var _ = Describe("Go buildpack", func() {
bratshelper.UnbuiltBuildpack("go", CopyBrats)
bratshelper.DeployingAnAppWithAnUpdatedVersionOfTheSameBuildpack(CopyBrats)
bratshelper.StagingWithBuildpackThatSetsEOL("go", func(_ string) *cutlass.App {
return CopyBrats("1.8.7")
})
bratshelper.StagingWithADepThatIsNotTheLatest("go", CopyBrats)
bratshelper.StagingWithCustomBuildpackWithCredentialsInDependencies(CopyBrats)
bratshelper.DeployAppWithExecutableProfileScript("go", CopyBrats)
bratshelper.DeployAnAppWithSensitiveEnvironmentVariables(CopyBrats)
bratshelper.ForAllSupportedVersions("go", CopyBrats, func(goVersion string, app *cutlass.App) {
PushApp(app)
By("installs the correct go version", func() {
Expect(app.Stdout.String()).To(ContainSubstring("Installing go " + goVersion))
})
By("runs a simple webserver", func() {
Expect(app.GetBody("/")).To(ContainSubstring("Hello World!"))
})
})
})
|
Make sure we only fetch the member roles we want to be displayed on the members page.
|
<?php
/**
* Verb Path Action
* GET /members index
* GET /members/create create
* POST /members store
* GET /members/{id} show
* GET /members/{id}/edit edit
* PUT/PATCH /members/{id} update
* DELETE /members/{id} destroy
*/
class MembersController extends BaseController {
/**
* The database table implementation.
*
* @var Members
*/
protected $member_roles;
/**
* Set our Member instance.
*
* @param Member $member
* @return void
*/
public function __construct(MemberRoles $member_roles)
{
$this->member_roles = $member_roles;
}
/**
* GET -> /members
*
* @return View
*/
public function index ()
{
$roles = $this->member_roles->with(array('members' => function($query)
{
$query->orderBy('last_name')->orderBy('first_name');
}))
->where('id', '<>', 6)->get();
return View::make('members.index')->with('roles', $roles);
}
}
|
<?php
/**
* Verb Path Action
* GET /members index
* GET /members/create create
* POST /members store
* GET /members/{id} show
* GET /members/{id}/edit edit
* PUT/PATCH /members/{id} update
* DELETE /members/{id} destroy
*/
class MembersController extends BaseController {
/**
* The database table implementation.
*
* @var Members
*/
protected $member_roles;
/**
* Set our Member instance.
*
* @param Member $member
* @return void
*/
public function __construct(MemberRoles $member_roles)
{
$this->member_roles = $member_roles;
}
/**
* GET -> /members
*
* @return View
*/
public function index ()
{
$roles = $this->member_roles->with(array('members' => function($query)
{
$query->orderBy('last_name')->orderBy('first_name');
}))->get();
return View::make('members.index')->with('roles', $roles);
}
}
|
[Fix] Fix some fields being overwritten by undefined
When a required field is absent, it's substitute, even when existing, was being overwritten with
undefined or [undefined]
|
const sanitizer = require('../../../sanitizer');
/**
* Loop and sanitize fields of an item to create a new one
*
* @param {Array<string>} fields List of fields to look inside item
* @param {Object} item The item to be looped and sanitized
* @param {Object} services Sanitization methods and sanitize list services
* @return {Object} A new object representing the sanitized item
*/
const loop = (fields, item) => {
const data = {};
for (let f = fields.length - 1; f >= 0; f--) {
let key = fields[f];
let value = item[key];
// sanitize keys and values
// TODO: Document keys and values sanitization better
value = sanitizer.value(key, value, sanitizer);
key = sanitizer.key(key, item);
// update the field
if (value && value.constructor === Array) {
// if it's not an invalid array
if (value[0]) {
Object.assign(data, {[key]: value});
}
} else if (value) {
// or if it's a valid single value
Object.assign(data, {[key]: value});
}
}
// Return the new object
return data;
};
module.exports = loop;
|
const sanitizer = require('../../../sanitizer');
/**
* Loop and sanitize fields of an item to create a new one
*
* @param {Array<string>} fields List of fields to look inside item
* @param {Object} item The item to be looped and sanitized
* @param {Object} services Sanitization methods and sanitize list services
* @return {Object} A new object representing the sanitized item
*/
const loop = (fields, item) => {
const data = {};
for (let f = fields.length - 1; f >= 0; f--) {
let key = fields[f];
let value = item[key];
// sanitize keys and values
// TODO: Document keys and values sanitization better
value = sanitizer.value(key, value, sanitizer);
key = sanitizer.key(key, sanitizer);
// update the field
if (value) {
Object.assign(data, {[key]: value});
}
}
// Return the new object
return data;
};
module.exports = loop;
|
Move test data into the scope where it is used
|
package search
import "testing"
func TestSubset(t *testing.T) {
t.Run("Asset", func(t *testing.T) {
a := &Asset{
VideoID: "video-id-123",
Type: "movie",
}
sub := Hit(a).Subset()
if got, want := sub.ID, a.VideoID; got != want {
t.Fatalf("sub.ID = %q, want %q", got, want)
}
if got, want := sub.Type, a.Type; got != want {
t.Fatalf("sub.Type = %q, want %q", got, want)
}
})
t.Run("Series", func(t *testing.T) {
s := &Series{
BrandID: "brand-id-345",
Type: "series",
}
sub := Hit(s).Subset()
if got, want := sub.ID, s.BrandID; got != want {
t.Fatalf("sub.ID = %q, want %q", got, want)
}
if got, want := sub.Type, s.Type; got != want {
t.Fatalf("sub.Type = %q, want %q", got, want)
}
})
}
|
package search
import "testing"
func TestSubset(t *testing.T) {
var (
a = &Asset{
VideoID: "video-id-123",
Type: "movie",
}
s = &Series{
BrandID: "brand-id-345",
Type: "series",
}
)
t.Run("Asset", func(t *testing.T) {
sub := Hit(a).Subset()
if got, want := sub.ID, a.VideoID; got != want {
t.Fatalf("sub.ID = %q, want %q", got, want)
}
if got, want := sub.Type, a.Type; got != want {
t.Fatalf("sub.Type = %q, want %q", got, want)
}
})
t.Run("Series", func(t *testing.T) {
sub := Hit(s).Subset()
if got, want := sub.ID, s.BrandID; got != want {
t.Fatalf("sub.ID = %q, want %q", got, want)
}
if got, want := sub.Type, s.Type; got != want {
t.Fatalf("sub.Type = %q, want %q", got, want)
}
})
}
|
Mark 2012.2 final in prep for RC1
Mark 2012.2 Final=True as we prepare to publish Nova Folsom RC1
Change-Id: I72731bded164aeec3c7e47f6bfe44fb219a9ea56
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
NOVA_VERSION = ['2012', '2', None]
YEAR, COUNT, REVISION = NOVA_VERSION
FINAL = True # This becomes true at Release Candidate time
def canonical_version_string():
return '.'.join(filter(None, NOVA_VERSION))
def version_string():
if FINAL:
return canonical_version_string()
else:
return '%s-dev' % (canonical_version_string(),)
def vcs_version_string():
return 'LOCALBRANCH:LOCALREVISION'
def version_string_with_vcs():
return '%s-%s' % (canonical_version_string(), vcs_version_string())
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
NOVA_VERSION = ['2012', '2', None]
YEAR, COUNT, REVISION = NOVA_VERSION
FINAL = False # This becomes true at Release Candidate time
def canonical_version_string():
return '.'.join(filter(None, NOVA_VERSION))
def version_string():
if FINAL:
return canonical_version_string()
else:
return '%s-dev' % (canonical_version_string(),)
def vcs_version_string():
return 'LOCALBRANCH:LOCALREVISION'
def version_string_with_vcs():
return '%s-%s' % (canonical_version_string(), vcs_version_string())
|
Set loaded flag on a task to false. Only load the task template if the task is clicked on (preloading the templates for all tasks is an expensive operation when there are a decent (20+) number of tasks).
|
angular.module('materialscommons').factory('toUITask', toUITaskService);
/*@ngInject*/
function toUITaskService() {
return function(task) {
task.displayState = {
details: {
showNotes: true,
showFiles: false,
showSamples: false,
currentFilesTab: 0,
currentSamplesTab: 0,
loadEditor: false
},
flags: {
starredClass: task.flags.starred ? 'fa-star' : 'fa-star-o',
flaggedClass: task.flags.flagged ? 'mc-flagged-color' : 'mc-flag-not-set'
},
selectedClass: '',
editTitle: true,
open: false,
maximize: false
};
task.loaded = false;
task.node = null;
}
}
|
angular.module('materialscommons').factory('toUITask', toUITaskService);
/*@ngInject*/
function toUITaskService() {
return function(task) {
task.displayState = {
details: {
showNotes: true,
showFiles: false,
showSamples: false,
currentFilesTab: 0,
currentSamplesTab: 0,
loadEditor: false
},
flags: {
starredClass: task.flags.starred ? 'fa-star' : 'fa-star-o',
flaggedClass: task.flags.flagged ? 'mc-flagged-color' : 'mc-flag-not-set'
},
selectedClass: '',
editTitle: true,
open: false,
maximize: false
};
task.node = null;
}
}
|
Use real measure definitions in e2e tests
|
from __future__ import absolute_import
from .test import *
DATABASES = {
"default": {
"ENGINE": "django.contrib.gis.db.backends.postgis",
"NAME": utils.get_env_setting("E2E_DB_NAME"),
"USER": utils.get_env_setting("DB_USER"),
"PASSWORD": utils.get_env_setting("DB_PASS"),
"HOST": utils.get_env_setting("DB_HOST", "127.0.0.1"),
}
}
PIPELINE_METADATA_DIR = os.path.join(APPS_ROOT, "pipeline", "metadata")
PIPELINE_DATA_BASEDIR = os.path.join(APPS_ROOT, "pipeline", "e2e-test-data", "data", "")
PIPELINE_IMPORT_LOG_PATH = os.path.join(
APPS_ROOT, "pipeline", "e2e-test-data", "log.json"
)
SLACK_SENDING_ACTIVE = True
BQ_DEFAULT_TABLE_EXPIRATION_MS = 24 * 60 * 60 * 1000 # 24 hours
# We want to use the real measure definitions, not the test ones!
MEASURE_DEFINITIONS_PATH = os.path.join(APPS_ROOT, "measure_definitions")
|
from __future__ import absolute_import
from .test import *
DATABASES = {
"default": {
"ENGINE": "django.contrib.gis.db.backends.postgis",
"NAME": utils.get_env_setting("E2E_DB_NAME"),
"USER": utils.get_env_setting("DB_USER"),
"PASSWORD": utils.get_env_setting("DB_PASS"),
"HOST": utils.get_env_setting("DB_HOST", "127.0.0.1"),
}
}
PIPELINE_METADATA_DIR = os.path.join(APPS_ROOT, "pipeline", "metadata")
PIPELINE_DATA_BASEDIR = os.path.join(APPS_ROOT, "pipeline", "e2e-test-data", "data", "")
PIPELINE_IMPORT_LOG_PATH = os.path.join(
APPS_ROOT, "pipeline", "e2e-test-data", "log.json"
)
SLACK_SENDING_ACTIVE = True
BQ_DEFAULT_TABLE_EXPIRATION_MS = 24 * 60 * 60 * 1000 # 24 hours
|
Update lat and lng on marker dragged
|
$(document).ready(function() {
$("#id_place")
.geocomplete({
map: "#map_location",
mapOptions: { zoom: 10 },
markerOptions: { draggable: true }
})
.bind("geocode:result", function(event, result) {
$('#id_location').val(result.geometry.location.lat() + ',' + result.geometry.location.lng());
})
.bind("geocode:error", function(event, status){
console.log("ERROR: " + status);
})
.bind("geocode:multiple", function(event, results){
console.log("Multiple: " + results.length + " results found");
})
.bind("geocode:dragged", function(event, latLng){
$('#id_location').val(latLng.lat() + ',' + latLng.lng());
});
});
|
$(document).ready(function() {
$("#id_place")
.geocomplete({
map: "#map_location",
mapOptions: {
zoom: 10
},
markerOptions: {
draggable: true
}
})
.bind("geocode:result", function(event, result) {
var coordinates = result.geometry.location.lat() + ',' + result.geometry.location.lng();
$('#id_location').val(coordinates);
})
.bind("geocode:error", function(event, status){
console.log("ERROR: " + status);
})
.bind("geocode:multiple", function(event, results){
console.log("Multiple: " + results.length + " results found");
});
});
|
Reset genfors admin password in afterEach
|
jest.mock('../../models/meeting');
jest.mock('../../utils');
const { emit, broadcast } = require('../../utils');
const { createGenfors } = require('../../models/meeting');
const { generateGenfors, generateSocket } = require('../../utils/generateTestData');
const { createGenfors: createGenforsListener } = require('../auth');
const generateData = data => Object.assign({}, data);
describe('admin', () => {
it('returns invalid admin password if incorrect', async () => {
process.env.SDF_GENFORS_ADMIN_PASSWORD = 'correct';
await createGenforsListener(generateSocket(), generateData());
expect(emit.mock.calls).toMatchSnapshot();
expect(broadcast.mock.calls).toEqual([]);
});
it('creates a new meeting if admin password is correct', async () => {
createGenfors.mockImplementation(async () => generateGenfors());
process.env.SDF_GENFORS_ADMIN_PASSWORD = 'correct';
await createGenforsListener(generateSocket(), generateData({ password: 'correct' }));
expect(emit.mock.calls).toEqual([]);
expect(broadcast.mock.calls).toEqual([]);
});
});
afterEach(() => {
process.env.SDF_GENFORS_ADMIN_PASSWORD = '';
});
|
jest.mock('../../models/meeting');
jest.mock('../../utils');
const { emit, broadcast } = require('../../utils');
const { createGenfors } = require('../../models/meeting');
const { generateGenfors, generateSocket } = require('../../utils/generateTestData');
const { createGenfors: createGenforsListener } = require('../auth');
const generateData = data => Object.assign({}, data);
describe('admin', () => {
it('returns invalid admin password if incorrect', async () => {
process.env.SDF_GENFORS_ADMIN_PASSWORD = 'correct';
await createGenforsListener(generateSocket(), generateData());
expect(emit.mock.calls).toMatchSnapshot();
expect(broadcast.mock.calls).toEqual([]);
process.env.SDF_GENFORS_ADMIN_PASSWORD = '';
});
it('creates a new meeting if admin password is correct', async () => {
createGenfors.mockImplementation(async () => generateGenfors());
process.env.SDF_GENFORS_ADMIN_PASSWORD = 'correct';
await createGenforsListener(generateSocket(), generateData({ password: 'correct' }));
expect(emit.mock.calls).toEqual([]);
expect(broadcast.mock.calls).toEqual([]);
process.env.SDF_GENFORS_ADMIN_PASSWORD = '';
});
});
|
Refactor WebResourceManagerImpl to use WebResourceFormatters for javascript and css resources
- css resource parameter for IE conditional comments added
- also added support for several HTML attributes
git-svn-id: 3d1f0b8d955af71bf8e09c956c180519124e4717@22271 2c54a935-e501-0410-bc05-97a93f6bca70
|
package com.atlassian.plugin.webresource;
import junit.framework.TestCase;
import java.util.HashMap;
public class TestJavascriptWebResourceFormatter extends TestCase
{
private JavascriptWebResourceFormatter javascriptWebResourceFormatter;
protected void setUp() throws Exception
{
super.setUp();
javascriptWebResourceFormatter = new JavascriptWebResourceFormatter();
}
protected void tearDown() throws Exception
{
javascriptWebResourceFormatter = null;
super.tearDown();
}
public void testMatches()
{
assertTrue(javascriptWebResourceFormatter.matches("blah.js"));
assertFalse(javascriptWebResourceFormatter.matches("blah.css"));
}
public void testFormatResource()
{
final String resourceName = "atlassian.js";
final String url = "/confluence/download/resources/confluence.web.resources:ajs/atlassian.js";
assertEquals("<script type=\"text/javascript\" src=\"" + url + "\" ></script>\n",
javascriptWebResourceFormatter.formatResource(resourceName, url, new HashMap()));
}
}
|
package com.atlassian.plugin.webresource;
import junit.framework.TestCase;
import java.util.HashMap;
public class TestJavascriptWebResourceFormatter extends TestCase
{
private JavascriptWebResourceFormatter javascriptWebResourceFormatter;
protected void setUp() throws Exception
{
super.setUp();
javascriptWebResourceFormatter = new JavascriptWebResourceFormatter();
}
protected void tearDown() throws Exception
{
javascriptWebResourceFormatter = null;
super.tearDown();
}
public void testMatches()
{
assertTrue(javascriptWebResourceFormatter.matches("blah.js"));
assertFalse(javascriptWebResourceFormatter.matches("blah.css"));
}
public void testFormatResource()
{
final String resourceName = "atlassian.js";
final String url = "/confluence/download/resources/confluence.web.resources:ajs/atlassian.js";
assertEquals("<script type=\"text/javascript\" src=\"" + url + "\"></script>\n",
javascriptWebResourceFormatter.formatResource(resourceName, url, new HashMap()));
}
}
|
Fix imgur `https` to `http`.
|
'use strict';
angular.module('members').filter('getMemberIdByFullName', [
function() {
/**
* @param members A member object arrays.
* @param fullName A member full name that want to find, format is 'firstname-lastname'.
*
* @return A member object.
*/
return function(members, fullName) {
var nameAry = fullName.split('-');
var firstName = nameAry[0],
lastName = nameAry[1];
var i=0, len=members.length;
for (; i<len; i++) {
if (members[i].firstnameen === firstName &&
members[i].lastnameen === lastName) {
return members[i];
}
}
return null;
};
}
]).filter('thumbnail', [
function() {
return function(imgurUrl) {
if (imgurUrl === undefined) {
return null;
} else {
return (imgurUrl.slice(0, 26) + 'm' + imgurUrl.slice(26));
}
};
}
]);
|
'use strict';
angular.module('members').filter('getMemberIdByFullName', [
function() {
/**
* @param members A member object arrays.
* @param fullName A member full name that want to find, format is 'firstname-lastname'.
*
* @return A member object.
*/
return function(members, fullName) {
var nameAry = fullName.split('-');
var firstName = nameAry[0],
lastName = nameAry[1];
var i=0, len=members.length;
for (; i<len; i++) {
if (members[i].firstnameen === firstName &&
members[i].lastnameen === lastName) {
return members[i];
}
}
return null;
};
}
]).filter('thumbnail', [
function() {
return function(imgurUrl) {
if (imgurUrl === undefined) {
return null;
} else {
return (imgurUrl.slice(0, 27) + 'm' + imgurUrl.slice(27));
}
};
}
]);
|
Store and implement the necessary data and methods for storing information about the job and result set id.
git-svn-id: d02d9bbead102d27982d5b07d77674fa86f521a4@1142 eaa15691-b419-025a-1212-ee371bd00084
|
/*
* Copyright 2009-2010 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.uci.ics.asterix.aql.translator;
import edu.uci.ics.asterix.aql.base.Statement;
import edu.uci.ics.asterix.aql.expression.Query;
import edu.uci.ics.hyracks.api.dataset.ResultSetId;
import edu.uci.ics.hyracks.api.job.JobId;
public class QueryResult {
private final Query query;
private final ResultSetId resultSetId;
private JobId jobId;
public QueryResult(Query statement, ResultSetId resultSetId) {
this.query = statement;
this.resultSetId = resultSetId;
}
public void setJobId(JobId jobId) {
this.jobId = jobId;
}
public JobId getJobId() {
return jobId;
}
public Statement getStatement() {
return query;
}
public ResultSetId getResultId() {
return resultSetId;
}
}
|
/*
* Copyright 2009-2010 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.uci.ics.asterix.aql.translator;
import edu.uci.ics.asterix.aql.base.Statement;
import edu.uci.ics.asterix.aql.expression.Query;
public class QueryResult {
private final Query query;
private final String resultPath;
public QueryResult(Query statement, String resultPath) {
this.query = statement;
this.resultPath = resultPath;
}
public Statement getStatement() {
return query;
}
public String getResultPath() {
return resultPath;
}
}
|
Switch default to actual ES default (date_optional_time) and add TIME type
|
# ElasticMapping
# File: types.py
# Desc: base Elasticsearch types
class CallableDict(dict):
BASE = None
OVERRIDES = None
def __call__(self, overrides):
new_dict = CallableDict(self)
new_dict.OVERRIDES = overrides
new_dict.BASE = self
return new_dict
BASE_TYPE = {
'store': False,
'doc_values': False
}
STRING = CallableDict({
'type': 'string',
'index': 'analyzed'
})
FLOAT = CallableDict({
'type': 'float'
})
DOUBLE = CallableDict({
'type': 'double'
})
INTEGER = CallableDict({
'type': 'integer'
})
LONG = CallableDict({
'type': 'long'
})
SHORT = CallableDict({
'type': 'short'
})
BYTE = CallableDict({
'type': 'byte'
})
BOOLEAN = CallableDict({
'type': 'boolean'
})
DATE = CallableDict({
'type': 'date',
'format': 'date'
})
TIME = CallableDict({
'type': 'date',
'format': 'time'
})
DATETIME = CallableDict({
'type': 'date',
'format': 'date_optional_time'
})
TYPES = {
name: type
for name, type in locals().items()
if isinstance(type, CallableDict)
}
|
# ElasticMapping
# File: types.py
# Desc: base Elasticsearch types
class CallableDict(dict):
BASE = None
OVERRIDES = None
def __call__(self, overrides):
new_dict = CallableDict(self)
new_dict.OVERRIDES = overrides
new_dict.BASE = self
return new_dict
BASE_TYPE = {
'store': False,
'doc_values': False
}
STRING = CallableDict({
'type': 'string',
'index': 'analyzed'
})
FLOAT = CallableDict({
'type': 'float'
})
DOUBLE = CallableDict({
'type': 'double'
})
INTEGER = CallableDict({
'type': 'integer'
})
LONG = CallableDict({
'type': 'long'
})
SHORT = CallableDict({
'type': 'short'
})
BYTE = CallableDict({
'type': 'byte'
})
BOOLEAN = CallableDict({
'type': 'boolean'
})
DATE = CallableDict({
'type': 'date',
'format': 'date'
})
DATETIME = CallableDict({
'type': 'date',
'format': 'date_hour_minute_second_fraction'
})
TYPES = {
name: type
for name, type in locals().items()
if isinstance(type, CallableDict)
}
|
Add OPTIONS to hartbeat API
|
package org.jboss.aerogear.unifiedpush.rest;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.GET;
import javax.ws.rs.OPTIONS;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import com.datastax.driver.core.utils.UUIDs;
import com.qmino.miredot.annotations.ReturnType;
@Path("/hartbeat")
public class HartbeatEndpoint extends AbstractBaseEndpoint {
@OPTIONS
@ReturnType("java.lang.Void")
public Response crossOriginForApplication(@Context HttpHeaders headers) {
return appendPreflightResponseHeaders(headers, Response.ok()).build();
}
/**
* Hartbeat Endpoint
*
* @return Hartbeat in form of time-based UUID.
* @statuscode 200 Successful response for your request
*/
@GET
@Produces(MediaType.APPLICATION_JSON)
@ReturnType("java.lang.String")
public Response hartbeat(@Context HttpServletRequest request) {
return appendAllowOriginHeader(Response.ok().entity(quote(UUIDs.timeBased().toString())), request);
}
}
|
package org.jboss.aerogear.unifiedpush.rest;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import com.datastax.driver.core.utils.UUIDs;
import com.qmino.miredot.annotations.ReturnType;
@Path("/hartbeat")
public class HartbeatEndpoint extends AbstractBaseEndpoint {
/**
* Hartbeat Endpoint
*
* @return Hartbeat in form of time-based UUID.
* @statuscode 200 Successful response for your request
*/
@GET
@Produces(MediaType.APPLICATION_JSON)
@ReturnType("java.lang.String")
public Response hartbeat(@Context HttpServletRequest request) {
return appendAllowOriginHeader(Response.ok().entity(quote(UUIDs.timeBased().toString())), request);
}
}
|
Revert backend template finder code
|
from django.utils.safestring import mark_safe
from django.template.loader import render_to_string
from armstrong.utils.backends import GenericBackend
def get_layout_template_name(model, name):
ret = []
for a in model.__class__.mro():
if not hasattr(a, "_meta"):
continue
ret.append("layout/%s/%s/%s.html" % (a._meta.app_label,
a._meta.object_name.lower(), name))
return ret
def render_model(object, name, dictionary=None, context_instance=None):
dictionary = dictionary or {}
dictionary["object"] = object
return mark_safe(render_to_string(get_layout_template_name(object, name),
dictionary=dictionary, context_instance=context_instance))
|
from django.utils.safestring import mark_safe
from django.template.loader import render_to_string
from armstrong.utils.backends import GenericBackend
template_finder = GenericBackend('ARMSTRONG_LAYOUT_TEMPLATE_FINDER',
defaults='armstrong.core.arm_layout.utils.get_layout_template_name')\
.get_backend
def get_layout_template_name(model, name):
ret = []
for a in model.__class__.mro():
if not hasattr(a, "_meta"):
continue
ret.append("layout/%s/%s/%s.html" % (a._meta.app_label,
a._meta.object_name.lower(), name))
return ret
def render_model(object, name, dictionary=None, context_instance=None):
dictionary = dictionary or {}
dictionary["object"] = object
return mark_safe(render_to_string(template_finder(object, name),
dictionary=dictionary, context_instance=context_instance))
|
[CovManager] Add redirect for / to collections
|
from django.conf.urls import patterns, include, url
from rest_framework import routers
from covmanager import views
router = routers.DefaultRouter()
router.register(r'collections', views.CollectionViewSet, base_name='collections')
router.register(r'repositories', views.RepositoryViewSet, base_name='repositories')
urlpatterns = patterns('',
url(r'^rest/api-auth/', include('rest_framework.urls', namespace='rest_framework')),
url(r'^$', views.index, name='index'),
url(r'^repositories/', views.repositories, name="repositories"),
url(r'^collections/$', views.collections, name="collections"),
url(r'^collections/api/$', views.CollectionViewSet.as_view({'get': 'list'}), name="collections_api"),
url(r'^collections/(?P<collectionid>\d+)/browse/$', views.collections_browse, name="collections_browse"),
url(r'^collections/(?P<collectionid>\d+)/browse/api/(?P<path>.*)', views.collections_browse_api, name="collections_browse_api"),
url(r'^rest/', include(router.urls)),
)
|
from django.conf.urls import patterns, include, url
from rest_framework import routers
from covmanager import views
router = routers.DefaultRouter()
router.register(r'collections', views.CollectionViewSet, base_name='collections')
router.register(r'repositories', views.RepositoryViewSet, base_name='repositories')
urlpatterns = patterns('',
url(r'^rest/api-auth/', include('rest_framework.urls', namespace='rest_framework')),
url(r'^repositories/', views.repositories, name="repositories"),
url(r'^collections/$', views.collections, name="collections"),
url(r'^collections/api/$', views.CollectionViewSet.as_view({'get': 'list'}), name="collections_api"),
url(r'^collections/(?P<collectionid>\d+)/browse/$', views.collections_browse, name="collections_browse"),
url(r'^collections/(?P<collectionid>\d+)/browse/api/(?P<path>.*)', views.collections_browse_api, name="collections_browse_api"),
url(r'^rest/', include(router.urls)),
)
|
base_report_auto_create_qweb: Change authors to new ones
|
# -*- coding: utf-8 -*-
# Authors: See README.RST for Contributors
# Copyright 2015-2016 See __openerp__.py for Authors
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
"name": "Report qweb auto generation",
"version": "9.0.1.0.0",
"depends": [
"report",
],
"external_dependencies": {
"python": [
"unidecode",
],
},
"author": "AvanzOSC, "
"Tecnativa, "
"Odoo Community Association (OCA), ",
"website": "https://github.com/OCA/server-tools",
"license": "AGPL-3",
"contributors": [
"Oihane Crucelaegui <oihanecrucelaegi@avanzosc.es>",
"Pedro M. Baeza <pedro.baeza@serviciosbaeza.com>",
"Ana Juaristi <anajuaristi@avanzosc.es>",
],
"category": "Tools",
"data": [
"wizard/report_duplicate_view.xml",
"views/report_xml_view.xml",
],
'installable': False,
}
|
# -*- coding: utf-8 -*-
# Authors: See README.RST for Contributors
# Copyright 2015-2016 See __openerp__.py for Authors
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
"name": "Report qweb auto generation",
"version": "9.0.1.0.0",
"depends": [
"report",
],
"external_dependencies": {
"python": [
"unidecode",
],
},
"author": "OdooMRP team, "
"AvanzOSC, "
"Serv. Tecnol. Avanzados - Pedro M. Baeza, "
"Odoo Community Association (OCA), ",
"website": "http://www.odoomrp.com",
"license": "AGPL-3",
"contributors": [
"Oihane Crucelaegui <oihanecrucelaegi@avanzosc.es>",
"Pedro M. Baeza <pedro.baeza@serviciosbaeza.com>",
"Ana Juaristi <anajuaristi@avanzosc.es>",
],
"category": "Tools",
"data": [
"wizard/report_duplicate_view.xml",
"views/report_xml_view.xml",
],
'installable': False,
}
|
Fix Firefox --headless config missing
|
'use strict';
module.exports = {
test_page: 'tests/index.html?hidepassed',
disable_watching: true,
launch_in_ci: ['Chrome'],
launch_in_dev: ['Chrome'],
browser_start_timeout: 120,
browser_args: {
Chrome: {
ci: [
// --no-sandbox is needed when running Chrome inside a container
process.env.CI ? '--no-sandbox' : null,
'--headless',
'--disable-dev-shm-usage',
'--disable-software-rasterizer',
'--mute-audio',
'--remote-debugging-port=0',
'--window-size=1440,900',
].filter(Boolean),
},
Firefox: {
ci: ['--headless'],
},
},
};
|
'use strict';
module.exports = {
test_page: 'tests/index.html?hidepassed',
disable_watching: true,
launch_in_ci: ['Chrome'],
launch_in_dev: ['Chrome'],
browser_start_timeout: 120,
browser_args: {
Chrome: {
ci: [
// --no-sandbox is needed when running Chrome inside a container
process.env.CI ? '--no-sandbox' : null,
'--headless',
'--disable-dev-shm-usage',
'--disable-software-rasterizer',
'--mute-audio',
'--remote-debugging-port=0',
'--window-size=1440,900',
].filter(Boolean),
},
},
};
|
Return exact results by default
|
export function filterByText(data, textFilter) {
if (textFilter === '') {
return data;
}
// case-insensitive
textFilter = textFilter.toLowerCase();
const exactMatches = [];
const substringMatches = [];
data.forEach(i => {
const name = i.name.toLowerCase();
if (name.split(' ').includes(textFilter)) {
exactMatches.push(i);
} else if (name.includes(textFilter)) {
substringMatches.push(i);
}
});
// return in ascending order
return substringMatches.concat(exactMatches);
}
export function filterByCheckbox(data, checkboxFilters) {
let filtered = data;
Object.keys(checkboxFilters).forEach(i => {
const currentFilters = Object.keys(checkboxFilters[i]).filter(j => checkboxFilters[i][j]);
if (currentFilters.length) {
filtered = filtered.filter(j => currentFilters.includes(j.filterable[i]));
}
});
return filtered;
}
|
export function filterByText(data, textFilter) {
if (textFilter === '') {
return data;
}
// case-insensitive
textFilter = textFilter.toLowerCase();
const exactMatches = [];
const substringMatches = [];
data.forEach(i => {
const name = i.name.toLowerCase();
if (name.split(' ').includes(textFilter)) {
exactMatches.push(i);
} else if (name.includes(textFilter)) {
substringMatches.push(i);
}
});
return exactMatches.concat(substringMatches);
}
export function filterByCheckbox(data, checkboxFilters) {
let filtered = data;
Object.keys(checkboxFilters).forEach(i => {
const currentFilters = Object.keys(checkboxFilters[i]).filter(j => checkboxFilters[i][j]);
if (currentFilters.length) {
filtered = filtered.filter(j => currentFilters.includes(j.filterable[i]));
}
});
return filtered;
}
|
Add Count method to StoresMap object
|
package tokay
import (
"sync"
)
type (
// routeStore stores route paths and the corresponding handlers.
routeStore interface {
Add(key string, data interface{}) int
Get(key string, pvalues []string) (data interface{}, pnames []string)
String() string
}
storesMap struct {
sync.RWMutex
M map[string]routeStore
}
)
func newStoresMap() *storesMap {
return &storesMap{M: make(map[string]routeStore)}
}
func (m *storesMap) Set(key string, val routeStore) {
m.Lock()
m.M[key] = val
m.Unlock()
}
func (m *storesMap) Range(fn func(key string, value routeStore)) {
m.Lock()
for key, value := range m.M {
fn(key, value)
}
m.Unlock()
}
func (m *storesMap) Get(key string) routeStore {
m.RLock()
v := m.M[key]
m.RUnlock()
return v
}
func (m *storesMap) Count() int {
m.RLock()
count := len(m.M)
m.RUnlock()
return count
}
|
package tokay
import (
"sync"
)
type (
// routeStore stores route paths and the corresponding handlers.
routeStore interface {
Add(key string, data interface{}) int
Get(key string, pvalues []string) (data interface{}, pnames []string)
String() string
}
storesMap struct {
sync.RWMutex
M map[string]routeStore
}
)
func newStoresMap() *storesMap {
return &storesMap{M: make(map[string]routeStore)}
}
func (m *storesMap) Set(key string, val routeStore) {
m.Lock()
m.M[key] = val
m.Unlock()
}
func (m *storesMap) Range(fn func(key string, value routeStore)) {
m.Lock()
for key, value := range m.M {
fn(key, value)
}
m.Unlock()
}
func (m *storesMap) Get(key string) routeStore {
m.RLock()
v := m.M[key]
m.RUnlock()
return v
}
|
Use single quotes instead of a double quotes
|
var gulp = require('gulp');
var nightwatch = require('../');
gulp.task('default', function() {
gulp.src('')
.pipe(nightwatch({
configFile: 'nightwatch.json'
}));
});
gulp.task('withCliArgs:array', function() {
gulp.src('')
.pipe(nightwatch({
configFile: 'nightwatch.json',
cliArgs: [ '--env chrome', '--tag sandbox' ]
}));
});
gulp.task('withCliArgs:object', function() {
gulp.src('')
.pipe(nightwatch({
configFile: 'nightwatch.json',
cliArgs: {
env: 'chrome',
tag: 'sandbox'
}
}));
});
|
var gulp = require('gulp');
var nightwatch = require('../');
gulp.task('default', function() {
gulp.src('')
.pipe(nightwatch({
configFile: 'nightwatch.json'
}));
});
gulp.task('withCliArgs:array', function() {
gulp.src('')
.pipe(nightwatch({
configFile: 'nightwatch.json',
cliArgs: [ '--env chrome', '--tags sandbox' ]
}));
});
gulp.task('withCliArgs:object', function() {
gulp.src('')
.pipe(nightwatch({
configFile: 'nightwatch.json',
cliArgs: {
env: "chrome",
tags: "sandbox"
}
}));
});
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.