text
stringlengths
16
4.96k
positive
stringlengths
321
2.24k
negative
stringlengths
310
2.21k
Use java.lang.Long.MAX_VALUE as best example of loss of precision.
package com.redseal.featureset; public class SomeClass extends SomeAbstractClass { public SomeClass() { theInt = 42; theLong = java.lang.Long.MAX_VALUE; theString = "Just some class."; } public SomeClass(int x, long y, String z) { theInt = x; theLong = y; theString = z; } private int theInt; private long theLong; private String theString; public int getInt() { return theInt; } public long getLong() { return theLong; } public String getString() { return theString; } public void setInt(int x) { theInt = x; } public void setLong(long x) { theLong = x; } public void setString(String x) { theString = x; } }
package com.redseal.featureset; public class SomeClass extends SomeAbstractClass { public SomeClass() { theInt = 42; theLong = 0x0123456789abcdefL; theString = "Just some class."; } public SomeClass(int x, long y, String z) { theInt = x; theLong = y; theString = z; } private int theInt; private long theLong; private String theString; public int getInt() { return theInt; } public long getLong() { return theLong; } public String getString() { return theString; } public void setInt(int x) { theInt = x; } public void setLong(long x) { theLong = x; } public void setString(String x) { theString = x; } }
Add semicolon to the return statement
import mapKeys from 'lodash/mapKeys'; import mapValues from 'lodash/mapValues'; import camelCase from 'lodash/camelCase'; import snakeCase from 'lodash/snakeCase'; function ensureTypeSnakeCase(value) { if (typeof value === 'string') { return snakeCase(value); } return value; } function assessmentMetaDataState(data) { const blankState = { assessment: false, assessmentIds: [], masteryModel: null, randomize: false, }; if (typeof data.assessmentmetadata === 'undefined') { return blankState; } // Data is from a serializer for a one to many key, so it will return an array of length 0 or 1 const assessmentMetaData = data.assessmentmetadata[0]; if (!assessmentMetaData) { return blankState; } const assessmentIds = assessmentMetaData.assessment_item_ids; const masteryModel = mapValues(assessmentMetaData.mastery_model, ensureTypeSnakeCase); if (!assessmentIds.length || !Object.keys(masteryModel).length) { return blankState; } return { assessment: true, assessmentIds, masteryModel, randomize: assessmentMetaData.randomize, }; } function convertKeysToCamelCase(object) { return mapKeys(object, (value, key) => camelCase(key)); } function convertKeysToSnakeCase(object) { return mapKeys(object, (value, key) => snakeCase(key)); } export { assessmentMetaDataState, convertKeysToCamelCase, convertKeysToSnakeCase };
import mapKeys from 'lodash/mapKeys'; import mapValues from 'lodash/mapValues'; import camelCase from 'lodash/camelCase'; import snakeCase from 'lodash/snakeCase'; function ensureTypeSnakeCase(value) { if (typeof value === 'string') { return snakeCase(value); } return value; } function assessmentMetaDataState(data) { const blankState = { assessment: false, assessmentIds: [], masteryModel: null, randomize: false, }; if (typeof data.assessmentmetadata === 'undefined') { return blankState } // Data is from a serializer for a one to many key, so it will return an array of length 0 or 1 const assessmentMetaData = data.assessmentmetadata[0]; if (!assessmentMetaData) { return blankState; } const assessmentIds = assessmentMetaData.assessment_item_ids; const masteryModel = mapValues(assessmentMetaData.mastery_model, ensureTypeSnakeCase); if (!assessmentIds.length || !Object.keys(masteryModel).length) { return blankState; } return { assessment: true, assessmentIds, masteryModel, randomize: assessmentMetaData.randomize, }; } function convertKeysToCamelCase(object) { return mapKeys(object, (value, key) => camelCase(key)); } function convertKeysToSnakeCase(object) { return mapKeys(object, (value, key) => snakeCase(key)); } export { assessmentMetaDataState, convertKeysToCamelCase, convertKeysToSnakeCase };
Add a comment for the slow learners like me.
/* $This file is distributed under the terms of the license in /doc/license.txt$ */ package edu.cornell.mannlib.vitro.webapp.servlet.setup; import javax.servlet.ServletContext; /** * Provide a mechanism so a ServletContextListener can set a flag that tells * other ServletContextListeners not to run. * * The listener that detects a problem should still throw an exception, so * Tomcat will declare that the startup has failed and mark the application as * not runnable. However, Tomcat will still run the other listeners before * giving up. Hence, the need for this flag. * * If the other listeners are looking for this flag, they can (and should) * decide to simply exit rather than attempting to initialize. */ public class AbortStartup { private static final String ATTRIBUTE_NAME = AbortStartup.class.getName(); /** * Sets a context attribute to prevent other context listeners from running. */ public static void abortStartup(ServletContext context) { context.setAttribute(ATTRIBUTE_NAME, new Boolean(true)); } /** * Checks whether a previous context listener has caused startup to be aborted. */ public static boolean isStartupAborted(ServletContext context) { return (context.getAttribute(ATTRIBUTE_NAME) != null); } }
/* $This file is distributed under the terms of the license in /doc/license.txt$ */ package edu.cornell.mannlib.vitro.webapp.servlet.setup; import javax.servlet.ServletContext; public class AbortStartup { private static final String ATTRIBUTE_NAME = AbortStartup.class.getName(); /** * Sets a context attribute to prevent other context listeners from running. */ public static void abortStartup(ServletContext context) { context.setAttribute(ATTRIBUTE_NAME, new Boolean(true)); } /** * Checks whether a previous context listener has caused startup to be aborted. */ public static boolean isStartupAborted(ServletContext context) { return (context.getAttribute(ATTRIBUTE_NAME) != null); } }
Resolve already returns an error.
package monk import ( "fmt" ) type Resolution struct { Resolved []string Seen []string } // Resolve the asset at assetPath and its dependencies. func (r *Resolution) Resolve(assetPath string, context *Context) error { r.Seen = append(r.Seen, assetPath) asset, err := context.lookup(assetPath) if err != nil { return err } for _, edge := range asset.Dependencies { if !contains(edge, r.Resolved) { if contains(edge, r.Seen) { return fmt.Errorf("circular dependency detected: %s <-> %s", assetPath, edge) } if err := r.Resolve(edge, context); err != nil { return err } } } r.Resolved = append(r.Resolved, assetPath) return nil } func contains(needle string, haystack []string) bool { found := false for _, item := range haystack { if needle == item { found = true break } } return found }
package monk import ( "fmt" ) type Resolution struct { Resolved []string Seen []string } // Resolve the asset at assetPath and its dependencies. // // TODO should return error func (r *Resolution) Resolve(assetPath string, context *Context) error { r.Seen = append(r.Seen, assetPath) asset, err := context.lookup(assetPath) if err != nil { return err } for _, edge := range asset.Dependencies { if !contains(edge, r.Resolved) { if contains(edge, r.Seen) { return fmt.Errorf("circular dependency detected: %s <-> %s", assetPath, edge) } if err := r.Resolve(edge, context); err != nil { return err } } } r.Resolved = append(r.Resolved, assetPath) return nil } func contains(needle string, haystack []string) bool { found := false for _, item := range haystack { if needle == item { found = true break } } return found }
Comment out test for loading deployment context until the load order can be fixed.
package com.netflix.config; import org.junit.Test; import static org.junit.Assert.assertEquals; public class CascadedPropertiesWithDeploymentContextTest { //@Test public void testLoadCascadedPropertiesConfigDeployment() throws Exception { ConfigurationManager.getConfigInstance().setProperty("archaius.deployment.environment", "test"); ConfigurationManager.getConfigInstance().setProperty("archaius.deployment.region", "us-east-1"); ConfigurationManager.loadCascadedPropertiesFromResources("test"); assertEquals("9", ConfigurationManager.getConfigInstance().getProperty("com.netflix.config.samples.SampleApp.SampleBean.numSeeds")); assertEquals("1", ConfigurationManager.getConfigInstance().getProperty("cascaded.property")); ConfigurationManager.loadAppOverrideProperties("override"); assertEquals("200", ConfigurationManager.getConfigInstance().getProperty("cascaded.property")); } }
package com.netflix.config; import org.junit.Test; import static org.junit.Assert.assertEquals; public class CascadedPropertiesWithDeploymentContextTest { @Test public void testLoadCascadedPropertiesConfigDeployment() throws Exception { ConfigurationBasedDeploymentContext context = new ConfigurationBasedDeploymentContext(); ConfigurationManager.getConfigInstance().setProperty("archaius.deployment.environment", "test"); ConfigurationManager.getConfigInstance().setProperty("archaius.deployment.region", "us-east-1"); ConfigurationManager.loadCascadedPropertiesFromResources("test"); assertEquals("9", ConfigurationManager.getConfigInstance().getProperty("com.netflix.config.samples.SampleApp.SampleBean.numSeeds")); assertEquals("1", ConfigurationManager.getConfigInstance().getProperty("cascaded.property")); ConfigurationManager.loadAppOverrideProperties("override"); assertEquals("200", ConfigurationManager.getConfigInstance().getProperty("cascaded.property")); } }
Add URL config to HTTP client adapter itest
/* * This file is provided to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.basho.riak.client.raw.itest; import java.io.IOException; import com.basho.riak.client.http.Hosts; import com.basho.riak.client.raw.RawClient; import com.basho.riak.client.raw.http.HTTPClientConfig; import com.basho.riak.client.raw.http.HTTPRiakClientFactory; /** * @author russell * */ public class ITestHTTPClientAdapter extends ITestRawClientAdapter { /* * (non-Javadoc) * * @see com.basho.riak.client.raw.itest.ITestRawClientAdapter#getClient() */ @Override protected RawClient getClient() throws IOException { HTTPClientConfig config = new HTTPClientConfig.Builder().withUrl(Hosts.RIAK_URL).build(); return HTTPRiakClientFactory.getInstance().newClient(config); } }
/* * This file is provided to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.basho.riak.client.raw.itest; import java.io.IOException; import com.basho.riak.client.http.Hosts; import com.basho.riak.client.raw.RawClient; import com.basho.riak.client.raw.http.HTTPClientConfig; import com.basho.riak.client.raw.http.HTTPRiakClientFactory; /** * @author russell * */ public class ITestHTTPClientAdapter extends ITestRawClientAdapter { /* * (non-Javadoc) * * @see com.basho.riak.client.raw.itest.ITestRawClientAdapter#getClient() */ @Override protected RawClient getClient() throws IOException { HTTPClientConfig config = new HTTPClientConfig.Builder().withHost(Hosts.RIAK_HOST).build(); return HTTPRiakClientFactory.getInstance().newClient(config); } }
Set final version of stream scenario
package io.scalecube.services.benchmarks.service; import io.scalecube.services.api.ServiceMessage; import java.util.concurrent.Callable; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; import reactor.core.scheduler.Schedulers; public class BenchmarkServiceImpl implements BenchmarkService { @Override public Mono<Void> requestVoid(ServiceMessage request) { return Mono.empty(); } @Override public Mono<ServiceMessage> requestOne(ServiceMessage message) { Callable<ServiceMessage> callable = () -> { long value = System.currentTimeMillis(); return ServiceMessage.from(message) .header(SERVICE_RECV_TIME, value) .header(SERVICE_SEND_TIME, value) .build(); }; return Mono.fromCallable(callable); } @Override public Flux<ServiceMessage> infiniteStream(ServiceMessage message) { Callable<ServiceMessage> callable = () -> ServiceMessage.from(message) .header(SERVICE_SEND_TIME, System.currentTimeMillis()) .build(); return Mono.fromCallable(callable) .subscribeOn(Schedulers.parallel()) .repeat() .onBackpressureDrop(); } }
package io.scalecube.services.benchmarks.service; import io.scalecube.services.api.ServiceMessage; import java.util.concurrent.Callable; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; import reactor.core.scheduler.Schedulers; public class BenchmarkServiceImpl implements BenchmarkService { @Override public Mono<Void> requestVoid(ServiceMessage request) { return Mono.empty(); } @Override public Mono<ServiceMessage> requestOne(ServiceMessage message) { Callable<ServiceMessage> callable = () -> { long value = System.currentTimeMillis(); return ServiceMessage.from(message) .header(SERVICE_RECV_TIME, value) .header(SERVICE_SEND_TIME, value) .build(); }; return Mono.fromCallable(callable); } @Override public Flux<ServiceMessage> infiniteStream(ServiceMessage message) { Callable<ServiceMessage> callable = () -> ServiceMessage.from(message) .header(SERVICE_SEND_TIME, System.currentTimeMillis()) .build(); return Mono.fromCallable(callable) .subscribeOn(Schedulers.newSingle("infiniteStream")) .repeat() .onBackpressureDrop(); } }
Remove source invisible value in Trl-Paragraphs Invisible is only set by chained component
<?php class Kwc_Paragraphs_Trl_Generator extends Kwc_Chained_Trl_Generator { protected function _formatConfig($parentData, $row) { $ret = parent::_formatConfig($parentData, $row); if (isset($ret['invisible'])) unset($ret['invisible']); $r = $this->_getRow($parentData->dbId.$this->getIdSeparator().$this->_getIdFromRow($row)); if (!$r || !$r->visible) { $ret['invisible'] = true; } return $ret; } protected function _createData($parentData, $row, $select) { $ret = parent::_createData($parentData, $row, $select); if ($select->getPart(Kwf_Component_Select::IGNORE_VISIBLE) !== true) { if (isset($ret->invisible) && $ret->invisible) { $ret = null; } } return $ret; } }
<?php class Kwc_Paragraphs_Trl_Generator extends Kwc_Chained_Trl_Generator { protected function _formatConfig($parentData, $row) { $ret = parent::_formatConfig($parentData, $row); $r = $this->_getRow($parentData->dbId.$this->getIdSeparator().$this->_getIdFromRow($row)); if (!$r || !$r->visible) { $ret['invisible'] = true; } return $ret; } protected function _createData($parentData, $row, $select) { $ret = parent::_createData($parentData, $row, $select); if ($select->getPart(Kwf_Component_Select::IGNORE_VISIBLE) !== true) { if (isset($ret->invisible) && $ret->invisible) { $ret = null; } } return $ret; } }
Fix examples by adding package to path
import json import os import shutil import sys sys.path.insert(0, os.path.abspath('.')) from scipy_data_fitting import Plot def save_example_fit(fit): """ Save fit result to a json file and a plot to an svg file. """ json_directory = os.path.join('examples', 'json') plot_directory = os.path.join('examples', 'plots') if not os.path.isdir(json_directory): os.makedirs(json_directory) if not os.path.isdir(plot_directory): os.makedirs(plot_directory) fit.to_json(os.path.join(json_directory, fit.name + '.json'), meta=fit.metadata) plot = Plot(fit) plot.save(os.path.join(plot_directory, fit.name + '.svg')) plot.close() def reset_directory(directory): """ Remove `directory` if it exists, then create it if it doesn't exist. """ if os.path.isdir(directory): shutil.rmtree(directory) if not os.path.isdir(directory): os.makedirs(directory)
import json import os import shutil from scipy_data_fitting import Plot def save_example_fit(fit): """ Save fit result to a json file and a plot to an svg file. """ json_directory = os.path.join('examples', 'json') plot_directory = os.path.join('examples', 'plots') if not os.path.isdir(json_directory): os.makedirs(json_directory) if not os.path.isdir(plot_directory): os.makedirs(plot_directory) fit.to_json(os.path.join(json_directory, fit.name + '.json'), meta=fit.metadata) plot = Plot(fit) plot.save(os.path.join(plot_directory, fit.name + '.svg')) plot.close() def reset_directory(directory): """ Remove `directory` if it exists, then create it if it doesn't exist. """ if os.path.isdir(directory): shutil.rmtree(directory) if not os.path.isdir(directory): os.makedirs(directory)
Fix bug where the placeholder would flex out Might be related to shady DOM or <slot> hybrid mode.
/*global Polymer, Cosmoz*/ (function () { 'use strict'; Polymer({ is: 'cosmoz-bottom-bar-view', behaviors: [ Cosmoz.ViewInfoBehavior, Polymer.IronResizableBehavior ], properties: { overflowing: { type: Boolean, value: false, reflectToAttribute: true }, scroller: { type: Object } }, listeners: { 'iron-resize': '_onResize' }, attached: function () { this.scroller = this.$.scroller; }, _onResize: function () { var scrollerSizer = this.$.scrollerSizer; // HACK(pasleq): ensure scrollerSizer is sized correctly. scrollerSizer.style.minHeight = ''; this.async(function () { if (scrollerSizer.scrollHeight > scrollerSizer.offsetHeight) { scrollerSizer.style.minHeight = scrollerSizer.scrollHeight + 'px'; } }); }, _getPadding: function (desktop) { // if (desktop) { // return; // } return 'padding-bottom: ' + this.$.bar.barHeight + 'px'; }, _getBarHeight: function (desktop) { var height = this.$.bar.barHeight; return [ 'max-height: ' + height + 'px', 'min-height: ' + height + 'px' ].join(';'); } }); }());
/*global Polymer, Cosmoz*/ (function () { 'use strict'; Polymer({ is: 'cosmoz-bottom-bar-view', behaviors: [ Cosmoz.ViewInfoBehavior, Polymer.IronResizableBehavior ], properties: { overflowing: { type: Boolean, value: false, reflectToAttribute: true }, scroller: { type: Object } }, listeners: { 'iron-resize': '_onResize' }, attached: function () { this.scroller = this.$.scroller; }, _onResize: function () { var scrollerSizer = this.$.scrollerSizer; // HACK(pasleq): ensure scrollerSizer is sized correctly. scrollerSizer.style.minHeight = ''; this.async(function () { if (scrollerSizer.scrollHeight > scrollerSizer.offsetHeight) { scrollerSizer.style.minHeight = scrollerSizer.scrollHeight + 'px'; } }); }, _getPadding: function (desktop) { // if (desktop) { // return; // } return 'padding-bottom: ' + this.$.bar.barHeight + 'px'; }, _getBarHeight: function (desktop) { return 'min-height: ' + this.$.bar.barHeight + 'px'; } }); }());
Update Arch package to 2.7
# # Biicode Arch Linux package settings. # # Check PKGBUILD_template docs for those settings and # what they mean. # def settings(): return { "version": "2.7", "release_number": "1", "arch_deps": ["cmake>=3.0.2", "zlib", "glibc", "sqlite", "wget", "python2-pmw" ], "debian_deps": ["zlib1g", "libc-bin", "libsqlite3-0", "wget", "lib32z1", "python-tk" ] } if __name__ == '__main__': print(settings())
# # Biicode Arch Linux package settings. # # Check PKGBUILD_template docs for those settings and # what they mean. # def settings(): return { "version": "2.6.1", "release_number": "1", "arch_deps": ["cmake>=3.0.2", "zlib", "glibc", "sqlite", "wget", "python2-pmw" ], "debian_deps": ["zlib1g", "libc-bin", "libsqlite3-0", "wget", "lib32z1", "python-tk" ] } if __name__ == '__main__': print(settings())
Use @dachary's much clearer regex to validate codenames
from flask_babel import gettext from flask_wtf import FlaskForm from wtforms import PasswordField from wtforms.validators import InputRequired, Regexp, Length from db import Source class LoginForm(FlaskForm): codename = PasswordField('codename', validators=[ InputRequired(message=gettext('This field is required.')), Length(1, Source.MAX_CODENAME_LEN, message=gettext('Field must be between 1 and ' '{max_codename_len} characters long. '.format( max_codename_len=Source.MAX_CODENAME_LEN))), # Make sure to allow dashes since some words in the wordlist have them Regexp(r'[\sA-Za-z0-9-]+$', message=gettext('Invalid input.')) ])
from flask_babel import gettext from flask_wtf import FlaskForm from wtforms import PasswordField from wtforms.validators import InputRequired, Regexp, Length from db import Source class LoginForm(FlaskForm): codename = PasswordField('codename', validators=[ InputRequired(message=gettext('This field is required.')), Length(1, Source.MAX_CODENAME_LEN, message=gettext('Field must be between 1 and ' '{max_codename_len} characters long. '.format( max_codename_len=Source.MAX_CODENAME_LEN))), # The regex here allows either whitespace (\s) or # alphanumeric characters (\W) except underscore (_) Regexp(r'(\s|[^\W_])+$', message=gettext('Invalid input.')) ])
Add an additional default redirect_uri (runserver's default port)
# -*- coding: utf-8 -*- # Generated by Django 1.11.3 on 2017-07-26 20:29 from __future__ import unicode_literals from django.db import migrations from oauth2_provider.models import Application class Migration(migrations.Migration): def add_default_application(apps, schema_editor): Application.objects.create( name="OSM Export Tool UI", redirect_uris= "http://localhost/authorized http://localhost:8080/authorized http://localhost:8000/authorized", client_type=Application.CLIENT_PUBLIC, authorization_grant_type=Application.GRANT_IMPLICIT, skip_authorization=True) dependencies = [ ("oauth2_provider", "0005_auto_20170514_1141"), ] operations = [ migrations.RunPython(add_default_application), ]
# -*- coding: utf-8 -*- # Generated by Django 1.11.3 on 2017-07-26 20:29 from __future__ import unicode_literals from django.db import migrations from oauth2_provider.models import Application class Migration(migrations.Migration): def add_default_application(apps, schema_editor): Application.objects.create( name="OSM Export Tool UI", redirect_uris= "http://localhost/authorized http://localhost:8080/authorized", client_type=Application.CLIENT_PUBLIC, authorization_grant_type=Application.GRANT_IMPLICIT, skip_authorization=True) dependencies = [ ("oauth2_provider", "0005_auto_20170514_1141"), ] operations = [ migrations.RunPython(add_default_application), ]
Change markers iteration to lambda function
/** * @fileoverview Define the map component of the map module. */ 'use strict'; angular.module('map').component('mapComponent', { templateUrl: 'map/map.template.html', controller: function($scope, $http) { let googleMapOption = { zoom: 4, center: new google.maps.LatLng(25, 80) }; $scope.gMap = new google.maps.Map(document.getElementById('map-container'), googleMapOption); // Add a marker to the map const addMarker = function(marker) { new google.maps.Marker({ map: $scope.gMap, position: new google.maps.LatLng(marker.lat, marker.long), title: marker.animal }); }; // Iterate over the markers json and add all of them to the map $http.get('map/markers/markers.json').then((response) => angular.forEach(response.data, addMarker)); } });
/** * @fileoverview Define the map component of the map module. */ 'use strict'; angular.module('map').component('mapComponent', { templateUrl: 'map/map.template.html', controller: function($scope, $http) { let googleMapOption = { zoom: 4, center: new google.maps.LatLng(25, 80) }; $scope.gMap = new google.maps.Map(document.getElementById('map-container'), googleMapOption); // Add a marker to the map const addMarker = function(marker) { new google.maps.Marker({ map: $scope.gMap, position: new google.maps.LatLng(marker.lat, marker.long), title: marker.animal }); }; // Iterate over the markers json and add all of them to the map $http.get('map/markers/markers.json').then(function(response){ angular.forEach(response.data, function(marker){ addMarker(marker); }); }); } });
Make brand and party available to tests.
# -*- coding: utf-8 -*- from unittest import TestCase from byceps.application import create_app from byceps.blueprints.brand.models import Brand from byceps.blueprints.party.models import Party from byceps.database import db class AbstractAppTestCase(TestCase): def setUp(self): self.app = create_app('test', initialize=False) self.db = db db.app = self.app db.drop_all() db.create_all() self.create_brand_and_party() self.client = self.app.test_client() def create_brand_and_party(self): self.brand = Brand(id='acme', title='ACME') db.session.add(self.brand) self.party = Party(id='acme-2014', brand=self.brand, title='ACME 2014') db.session.add(self.party) db.session.commit() def tearDown(self): db.session.remove() db.drop_all()
# -*- coding: utf-8 -*- from unittest import TestCase from byceps.application import create_app from byceps.blueprints.brand.models import Brand from byceps.blueprints.party.models import Party from byceps.database import db class AbstractAppTestCase(TestCase): def setUp(self): self.app = create_app('test', initialize=False) self.db = db db.app = self.app db.drop_all() db.create_all() self.create_brand_and_party() self.client = self.app.test_client() def create_brand_and_party(self): brand = Brand(id='acme', title='ACME') db.session.add(brand) party = Party(id='acme-2014', brand=brand, title='ACME 2014') db.session.add(party) db.session.commit() def tearDown(self): db.session.remove() db.drop_all()
Use newer object destructuring syntax.
const trimHeadersLower = require('./trimHeadersLower'); module.exports = function checkTimeline({ timeline, fulfilmentValue }) { let lastEventOrError = null; // pull out the last event if it exists if (timeline.length > 0) { lastEventOrError = timeline[timeline.length - 1]; } if (lastEventOrError instanceof Error) { const name = lastEventOrError.name; if (name === 'Error' || name === 'UnexpectedError') { throw lastEventOrError; } else if (name === 'EarlyExitError') { // in the case of an early exirt error we need // to generate a diff from the last recorded // event & spec const failedEvent = timeline[timeline.length - 2]; const failedExchange = failedEvent.exchange; trimHeadersLower(failedExchange.request); lastEventOrError.data = { failedExchange: failedExchange, failedExchangeSpec: failedEvent.spec }; throw lastEventOrError; } else { // ignore to cause generation of a diff } } else if (lastEventOrError === null && fulfilmentValue) { return [null, fulfilmentValue]; } return [timeline, fulfilmentValue]; };
const trimHeadersLower = require('./trimHeadersLower'); module.exports = function checkTimeline(result) { const timeline = result.timeline; const fulfilmentValue = result.fulfilmentValue; let lastEventOrError = null; // pull out the last event if it exists if (timeline.length > 0) { lastEventOrError = timeline[timeline.length - 1]; } if (lastEventOrError instanceof Error) { const name = lastEventOrError.name; if (name === 'Error' || name === 'UnexpectedError') { throw lastEventOrError; } else if (name === 'EarlyExitError') { // in the case of an early exirt error we need // to generate a diff from the last recorded // event & spec const failedEvent = timeline[timeline.length - 2]; const failedExchange = failedEvent.exchange; trimHeadersLower(failedExchange.request); lastEventOrError.data = { failedExchange: failedExchange, failedExchangeSpec: failedEvent.spec }; throw lastEventOrError; } else { // ignore to cause generation of a diff } } else if (lastEventOrError === null && fulfilmentValue) { return [null, fulfilmentValue]; } return [timeline, fulfilmentValue]; };
Add minus button as well and combine streams
import React from 'react'; import { Link } from 'react-router'; import Cycle from 'cycle-react'; let Rx = Cycle.Rx; class Testink extends React.Component { render() { return <div> Cheers { this.props.test } </div>; } } let Clicker = Cycle.component( 'Counter', ( interactions ) => { return Rx.Observable.merge( interactions.get( 'plus' ) .map( () => 1 ), interactions.get( 'minus' ) .map( () => -1 ) ) .scan( 0, ( acc, i ) => acc + i ) .startWith( 0 ) .map( i => { return <div> <p> { i } </p> <button onClick={ interactions.listener( 'plus' ) }> plus one </button> <button onClick={ interactions.listener( 'minus' ) }> minus one </button> </div> } ); } ); let Counter = Cycle.component( 'Counter', ( ) => { return Rx.Observable.interval( 1000 ).map( i => <Testink test={ i } /> ); } ); export default class Home extends React.Component { render() { return ( <div> <div> <Link to="/about"> About </Link> </div> <div> <Link to="/map"> Map </Link> </div> <Clicker/> <p/> <Counter/> </div> ); } }
import React from 'react'; import { Link } from 'react-router'; import Cycle from 'cycle-react'; let Rx = Cycle.Rx; class Testink extends React.Component { render() { return <div> Cheers { this.props.test } </div>; } } let Clicker = Cycle.component( 'Counter', ( interactions ) => { return interactions.get( 'plus' ) .startWith( 0 ) .scan( 0, acc => acc + 1 ) //old Rx style. remove first argument (0) in rx 3+ .map( i => <button onClick={ interactions.listener( 'plus' ) }> plus one { i } </button> ); } ); let Counter = Cycle.component( 'Counter', ( ) => { return Rx.Observable.interval( 1000 ).map( i => <Testink test={ i } /> ); } ); export default class Home extends React.Component { render() { return ( <div> <div> <Link to="/about"> About </Link> </div> <div> <Link to="/map"> Map </Link> </div> <Clicker/> <Counter/> </div> ); } }
Add original method call interception for protocols
package introspect; import net.bytebuddy.instrumentation.method.bytecode.bind.annotation.*; import java.lang.invoke.MethodHandle; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.concurrent.Callable; import java.util.concurrent.atomic.AtomicInteger; import clojure.lang.IFn; public class Interceptor { public static IFn callback; @RuntimeType public static Object intercept(@SuperCall Callable method, @Origin Method originalMethod, @This Object proxy, @AllArguments Object[] allArguments) throws Throwable { Object returnValue = null; try { returnValue = method.call(); } catch (Throwable throwable) { callback.invoke(proxy, allArguments, throwable); throwable.printStackTrace(); throw throwable; } // System.out.printf("___%s, returnValue: %s___\n",proxy, returnValue); callback.invoke(proxy, originalMethod, allArguments, returnValue); return returnValue; } public static void setCallback(IFn fn) { callback = fn; } }
package introspect; import net.bytebuddy.instrumentation.method.bytecode.bind.annotation.*; import java.lang.invoke.MethodHandle; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.concurrent.Callable; import java.util.concurrent.atomic.AtomicInteger; import clojure.lang.IFn; public class Interceptor { public static IFn callback; @RuntimeType public static Object intercept(@SuperCall Callable method, @This Object proxy, @AllArguments Object[] allArguments) throws Throwable { Object returnValue = null; try { returnValue = method.call(); } catch (Throwable throwable) { callback.invoke(proxy, allArguments, throwable); throwable.printStackTrace(); throw throwable; } // System.out.printf("___%s, returnValue: %s___\n",proxy, returnValue); callback.invoke(proxy, allArguments, returnValue); return returnValue; } public static void setCallback(IFn fn) { callback = fn; } }
Use "$" instead of "jquery" to follow the import statement
/* * This file is part of the Sylius package. * * (c) Paweł Jędrzejewski * * For the full copyright and license information, please view the LICENSE * file that was distributed with this source code. */ import 'semantic-ui-css/components/api'; import $ from 'jquery'; $.fn.extend({ taxonMoveUp() { const element = this; element.api({ method: 'PUT', on: 'click', beforeSend(settings) { /* eslint-disable-next-line no-param-reassign */ settings.data = { position: $(this).data('position') - 1, }; return settings; }, onSuccess() { window.location.reload(); }, }); }, taxonMoveDown() { const element = this; element.api({ method: 'PUT', on: 'click', beforeSend(settings) { /* eslint-disable-next-line no-param-reassign */ settings.data = { position: $(this).data('position') + 1, }; return settings; }, onSuccess() { window.location.reload(); }, }); }, });
/* * This file is part of the Sylius package. * * (c) Paweł Jędrzejewski * * For the full copyright and license information, please view the LICENSE * file that was distributed with this source code. */ import 'semantic-ui-css/components/api'; import $ from 'jquery'; $.fn.extend({ taxonMoveUp() { const element = this; element.api({ method: 'PUT', on: 'click', beforeSend(settings) { /* eslint-disable-next-line no-param-reassign */ settings.data = { position: jquery(this).data('position') - 1, }; return settings; }, onSuccess() { window.location.reload(); }, }); }, taxonMoveDown() { const element = this; element.api({ method: 'PUT', on: 'click', beforeSend(settings) { /* eslint-disable-next-line no-param-reassign */ settings.data = { position: jquery(this).data('position') + 1, }; return settings; }, onSuccess() { window.location.reload(); }, }); }, });
Reduce sample count for xorshift_rand tests
# Copyright 2014 Anonymous7 from Reddit, Julian Andrews # # This software may be modified and distributed under the terms # of the MIT license. See the LICENSE file for details. from __future__ import absolute_import, division import collections import unittest import eval7.xorshift_rand class XorshiftRandTestCase(unittest.TestCase): SAMPLE_COUNT = 1000000 BINS = 1000 DELTA = 125 def check_uniform(self, counter): expected_count = self.SAMPLE_COUNT / self.BINS self.assertEqual(set(range(self.BINS)), set(counter.keys())) for count in counter.values(): self.assertAlmostEqual( count, expected_count, delta=self.DELTA ) def test_random_is_uniform(self): sample = ( eval7.xorshift_rand.random() for i in range(self.SAMPLE_COUNT) ) counter = collections.Counter(int(num * self.BINS) for num in sample) self.check_uniform(counter) def test_randint_is_uniform(self): sample = ( eval7.xorshift_rand.randint(self.BINS) for i in range(self.SAMPLE_COUNT) ) self.check_uniform(collections.Counter(sample))
# Copyright 2014 Anonymous7 from Reddit, Julian Andrews # # This software may be modified and distributed under the terms # of the MIT license. See the LICENSE file for details. from __future__ import absolute_import, division import collections import unittest import eval7.xorshift_rand class XorshiftRandTestCase(unittest.TestCase): SAMPLE_COUNT = 10000000 BINS = 1000 DELTA = 450 def check_uniform(self, counter): expected_count = self.SAMPLE_COUNT / self.BINS self.assertEqual(set(range(self.BINS)), set(counter.keys())) for count in counter.values(): self.assertAlmostEqual( count, expected_count, delta=self.DELTA ) def test_random_is_uniform(self): sample = ( eval7.xorshift_rand.random() for i in range(self.SAMPLE_COUNT) ) counter = collections.Counter(int(num * self.BINS) for num in sample) self.check_uniform(counter) def test_randint_is_uniform(self): sample = ( eval7.xorshift_rand.randint(self.BINS) for i in range(self.SAMPLE_COUNT) ) self.check_uniform(collections.Counter(sample))
Handle FileNotFound exception from build_asset
import mimetypes import posixpath import urllib from django.conf import settings from django.core.exceptions import ImproperlyConfigured from django.contrib.staticfiles.views import serve as staticfiles_serve from django.http import HttpResponse from gears.assets import build_asset from gears.exceptions import FileNotFound from .settings import environment def serve(request, path, **kwargs): if not settings.DEBUG and not kwargs.get('insecure'): raise ImproperlyConfigured( "The gears view can only be used in debug mode or if the " "--insecure option of 'runserver' is used.") normalized_path = posixpath.normpath(urllib.unquote(path)).lstrip('/') try: asset = build_asset(environment, normalized_path) except FileNotFound: return staticfiles_serve(request, path, **kwargs) mimetype, encoding = mimetypes.guess_type(normalized_path) mimetype = mimetype or 'application/octet-stream' response = HttpResponse(asset, mimetype=mimetype) if encoding: response['Content-Encoding'] = encoding return response
import mimetypes import posixpath import urllib from django.conf import settings from django.core.exceptions import ImproperlyConfigured from django.contrib.staticfiles.views import serve as staticfiles_serve from django.http import HttpResponse from gears.assets import build_asset from .settings import environment def serve(request, path, **kwargs): if not settings.DEBUG and not kwargs.get('insecure'): raise ImproperlyConfigured( "The gears view can only be used in debug mode or if the " "--insecure option of 'runserver' is used.") normalized_path = posixpath.normpath(urllib.unquote(path)).lstrip('/') asset = build_asset(environment, normalized_path) if not asset: return staticfiles_serve(request, path, **kwargs) mimetype, encoding = mimetypes.guess_type(normalized_path) mimetype = mimetype or 'application/octet-stream' response = HttpResponse(asset, mimetype=mimetype) if encoding: response['Content-Encoding'] = encoding return response
Add gaps "-" to reverse complement function
"""Utilities for working with sequences.""" import re from Bio import SeqIO CODON_LEN = 3 COMPLEMENT = str.maketrans('ACGTUWSMKRYBDHVNXacgtuwsmkrybdhvnx-', 'TGCAAWSKMYRVHDBNXtgcaawskmyrvhdbnx-') IS_PROTEIN = re.compile(r'[EFILPQ]', re.IGNORECASE) def reverse_complement(seq): """Reverse complement a nucleotide sequence. We added some wildcards.""" return seq.translate(COMPLEMENT)[::-1] def is_protein(seq): """Check if the sequence a protein.""" return IS_PROTEIN.search(seq) def fasta_file_has_protein(query_files): """ Search for protein characters in a fasta file. If the user has told us that we have a protein then return that. """ for query_file in query_files: with open(query_file) as in_file: for query in SeqIO.parse(in_file, 'fasta'): if is_protein(str(query.seq)): return True return False
"""Utilities for working with sequences.""" import re from Bio import SeqIO CODON_LEN = 3 COMPLEMENT = str.maketrans('ACGTUWSMKRYBDHVNXacgtuwsmkrybdhvnx', 'TGCAAWSKMYRVHDBNXtgcaawskmyrvhdbnx') IS_PROTEIN = re.compile(r'[EFILPQ]', re.IGNORECASE) def reverse_complement(seq): """Reverse complement a nucleotide sequence. We added some wildcards.""" return seq.translate(COMPLEMENT)[::-1] def is_protein(seq): """Check if the sequence a protein.""" return IS_PROTEIN.search(seq) def fasta_file_has_protein(query_files): """ Search for protein characters in a fasta file. If the user has told us that we have a protein then return that. """ for query_file in query_files: with open(query_file) as in_file: for query in SeqIO.parse(in_file, 'fasta'): if is_protein(str(query.seq)): return True return False
Add an explicit default for REDIS_URI
from __future__ import absolute_import from __future__ import division from __future__ import unicode_literals # The base domain name for this installation. Used to control linking to # sub-domains. SERVER_NAME = "warehouse.local" # The URI for our PostgreSQL database. SQLALCHEMY_DATABASE_URI = "postgres:///warehouse" # The URI for our Redis database. REDIS_URI = "redis://localhost:6379/0" # The type of Storage to use. Can be either Filesystem or S3. STORAGE = "Filesystem" # The hash to use in computing filenames. # Allowed values: md5, sha1, sha224, sha256, sha384, sha512, None STORAGE_HASH = "md5" # Base directory for storage when using the Filesystem. STORAGE_DIRECTORY = "data" # The name of the bucket that files will be stored in when using S3. # STORAGE_BUCKET = "<storage bucket>" # The S3 Key used to access S3 when using S3 Storage # S3_KEY = "<S3 Key>" # The S3 Secret used to access S# when using S3 Storage # S3_SECRET = "<S3 Secret>"
from __future__ import absolute_import from __future__ import division from __future__ import unicode_literals # The base domain name for this installation. Used to control linking to # sub-domains. SERVER_NAME = "warehouse.local" # The URI for our PostgreSQL database. SQLALCHEMY_DATABASE_URI = "postgres:///warehouse" # The type of Storage to use. Can be either Filesystem or S3. STORAGE = "Filesystem" # The hash to use in computing filenames. # Allowed values: md5, sha1, sha224, sha256, sha384, sha512, None STORAGE_HASH = "md5" # Base directory for storage when using the Filesystem. STORAGE_DIRECTORY = "data" # The name of the bucket that files will be stored in when using S3. # STORAGE_BUCKET = "<storage bucket>" # The S3 Key used to access S3 when using S3 Storage # S3_KEY = "<S3 Key>" # The S3 Secret used to access S# when using S3 Storage # S3_SECRET = "<S3 Secret>"
Allow DEV access in De Pinte
<?php // if you don't want to setup permissions the proper way, just uncomment the following PHP line // read http://symfony.com/doc/current/book/installation.html#configuration-and-setup for more information //umask(0000); // this check prevents access to debug front controllers that are deployed by accident to production servers. // feel free to remove this, extend it, or make something more sophisticated. if (!in_array(@$_SERVER['REMOTE_ADDR'], array( '127.0.0.1', '213.49.253.49', '78.22.168.46', '::1', ))) { header('HTTP/1.0 403 Forbidden'); exit('You are not allowed to access this file. Check '.basename(__FILE__).' for more information.'); } require_once __DIR__.'/../app/bootstrap.php.cache'; require_once __DIR__.'/../app/AppKernel.php'; use Symfony\Component\HttpFoundation\Request; $kernel = new AppKernel('dev', true); $kernel->loadClassCache(); $kernel->handle(Request::createFromGlobals())->send();
<?php // if you don't want to setup permissions the proper way, just uncomment the following PHP line // read http://symfony.com/doc/current/book/installation.html#configuration-and-setup for more information //umask(0000); // this check prevents access to debug front controllers that are deployed by accident to production servers. // feel free to remove this, extend it, or make something more sophisticated. if (!in_array(@$_SERVER['REMOTE_ADDR'], array( '127.0.0.1', '213.49.253.49', '::1', ))) { header('HTTP/1.0 403 Forbidden'); exit('You are not allowed to access this file. Check '.basename(__FILE__).' for more information.'); } require_once __DIR__.'/../app/bootstrap.php.cache'; require_once __DIR__.'/../app/AppKernel.php'; use Symfony\Component\HttpFoundation\Request; $kernel = new AppKernel('dev', true); $kernel->loadClassCache(); $kernel->handle(Request::createFromGlobals())->send();
Read from Stdin when no items are passed.
from __future__ import print_function import argparse import shcol import sys __all__ = ['main'] def main(cmd_args): parser = argparse.ArgumentParser( description='Generate columnized output for given string items.', version='shcol {}'.format(shcol.__version__) ) parser.add_argument( 'items', nargs='*', metavar='item', help='an item to columnize' ) parser.add_argument( '-s', '--spacing', metavar='N', type=int, default=2, help='number of blanks between two columns (default: 2)' ) parser.add_argument( '-w', '--width', metavar='N', type=int, default=80, help='maximal amount of characters per line (default: 80)' ) parser.add_argument( '-S', '--sort', action='store_true', default=False, help='sort the items' ) args = parser.parse_args(cmd_args[1:]) items = args.items or [line.rstrip('\n') for line in sys.stdin] print(shcol.columnize(items, args.spacing, args.width, args.sort))
from __future__ import print_function import argparse import shcol __all__ = ['main'] def main(cmd_args): parser = argparse.ArgumentParser( description='Generate columnized output for given string items.', version='shcol {}'.format(shcol.__version__) ) parser.add_argument('items', nargs='+', help='the items to columnize') parser.add_argument( '-s', '--spacing', metavar='N', type=int, default=2, help='number of blanks between two columns (default: 2)' ) parser.add_argument( '-w', '--width', metavar='N', type=int, default=80, help='maximal amount of characters per line (default: 80)' ) parser.add_argument( '-S', '--sort', help='sort the items', action='store_true', default=False ) print(shcol.columnize(args.items, args.spacing, args.width, args.sort))
Return promises. Fix missing return in else statement.
Application.Services.factory('ProcessList', ["Restangular", "$q", processListService]); function processListService(Restangular, $q) { var service = { processes: {} }; return { getProcesses: function(project_id){ var deffered = $q.defer(); var pid = project_id; if (service.processes.hasOwnProperty(project_id)){ deffered.resolve(service.processes[project_id]); } else { Restangular.one('processes').one('project', project_id) .getList().then(function(processes){ service.processes[pid] = processes; deffered.resolve(service.processes[pid]); }); } return deffered.promise; } }; }
Application.Services.factory('ProcessList', ["Restangular", "pubsub", processListService]); function processListService(Restangular, pubsub) { var service = { processes: {} }; return { getProcesses: function(project_id){ if (service.processes.hasOwnProperty(project_id)){ return service.processes[project_id]; } else { this.loadProcesses(project_id); } }, loadProcesses: function (project_id) { Restangular.one('processes').one('project', project_id).getList().then(function(data){ service.processes[project_id] = data; }); } }; }
Set a correct version number.
"""Setup script for `kozai`.""" from setuptools import setup def readme(): with open('README.md') as f: return f.read() setup( author='Joseph O\'Brien Antognini', author_email='joe.antognini@gmail.com', classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Science/Research', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 2.7', 'Topic :: Scientific/Engineering :: Astronomy', 'Topic :: Scientific/Engineering :: Physics', ], description='Evolve hierarchical triples.', install_requires=['numpy>=1.18.4', 'scipy>=1.4.1'], keywords='kozai lidov triple dynamics orbit star', license='MIT', long_description=readme(), name='kozai', packages=['kozai'], python_requires='>=3.6', scripts=[ 'scripts/kozai', 'scripts/kozai-test-particle', 'scripts/kozai-ekm', ], tests_require=['pytest>=5.4.2'], url='https://github.com/joe-antognini/kozai', version='0.3.0', zip_safe=False, )
"""Setup script for `kozai`.""" from setuptools import setup def readme(): with open('README.md') as f: return f.read() setup( author='Joseph O\'Brien Antognini', author_email='joe.antognini@gmail.com', classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Science/Research', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 2.7', 'Topic :: Scientific/Engineering :: Astronomy', 'Topic :: Scientific/Engineering :: Physics', ], description='Evolve hierarchical triples.', install_requires=['numpy>=1.18.4', 'scipy>=1.4.1'], keywords='kozai lidov triple dynamics orbit star', license='MIT', long_description=readme(), name='kozai', packages=['kozai'], python_requires='>=3.6', scripts=[ 'scripts/kozai', 'scripts/kozai-test-particle', 'scripts/kozai-ekm', ], tests_require=['pytest>=5.4.2'], url='https://github.com/joe-antognini/kozai', version='1.0.0', zip_safe=False, )
Change desktop resolution to 1280x960
const express = require('express'); const router = express.Router(); const puppeteer = require('puppeteer'); router.get('/', function (req, res, next) { puppeteer.launch().then(async browser => { const page = await browser.newPage(); const emulation = req.query.emulation || 'mobile'; const defaultWidth = emulation === 'mobile' ? 412 : 1280; const defaultHeight = emulation === 'mobile' ? 732 : 960; page.setViewport({ width: req.query.width ? parseInt(req.query.width, 10) : defaultWidth, height: req.query.heigh ? parseInt(req.query.height, 10) : defaultHeight }); await page.goto(req.query.url, {timeout: 60000}); const shot = await page.screenshot({}); await browser.close(); res.setHeader('Content-Type', 'image/png'); res.status(200).send(shot); }).catch(e => { console.error(e); res.status(500).send({error: 'Could not take screenshot'}); }); }); module.exports = router;
const express = require('express'); const router = express.Router(); const puppeteer = require('puppeteer'); router.get('/', function (req, res, next) { puppeteer.launch().then(async browser => { const page = await browser.newPage(); const emulation = req.query.emulation || 'mobile'; const defaultWidth = emulation === 'mobile' ? 412 : 1350; const defaultHeight = emulation === 'mobile' ? 732 : 940; page.setViewport({ width: req.query.width ? parseInt(req.query.width, 10) : defaultWidth, height: req.query.heigh ? parseInt(req.query.height, 10) : defaultHeight }); await page.goto(req.query.url, {timeout: 60000}); const shot = await page.screenshot({}); await browser.close(); res.setHeader('Content-Type', 'image/png'); res.status(200).send(shot); }).catch(e => { console.error(e); res.status(500).send({error: 'Could not take screenshot'}); }); }); module.exports = router;
Watch by default and switch to a different devtool
const webpack = require('webpack'); module.exports = { entry: { main: './javascript/app.jsx', vendor: ['react', 'react-dom', 'whatwg-fetch'] }, output: { path: './assets/javascript', filename: '[name].js' }, resolve: { extensions: ['.js', '.jsx'] }, plugins: [ new webpack.optimize.CommonsChunkPlugin({ names: ['vendor', 'manifest'] }) ], module: { loaders: [{ test: /\.(js|jsx)$/, exclude: /node_modules/, loader: "babel-loader", query: { presets: ["es2015", "react"] } }] }, devtool: 'eval-source-map', watch: true };
const webpack = require('webpack'); module.exports = { entry: { main: './javascript/app.jsx', vendor: ['react', 'react-dom', 'whatwg-fetch'] }, output: { path: './assets/javascript', filename: '[name].js' }, resolve: { extensions: ['.js', '.jsx'] }, plugins: [ new webpack.optimize.CommonsChunkPlugin({ names: ['vendor', 'manifest'] }) ], module: { loaders: [{ test: /\.(js|jsx)$/, exclude: /node_modules/, loader: "babel-loader", query: { presets: ["es2015", "react"] } }] }, devtool: 'cheap-source-map', watch: false };
Remove unnecessary variable from SM factory closure ($dbParams)
<?php return array( 'cdli-twostagesignup' => array( 'test_database_schema_up' => 'schema_up.mysql.sql', 'test_database_schema_down' => 'schema_down.sql', ), 'service_manager' => array( 'factories' => array( 'Zend\Db\Adapter\Adapter' => function ($sm) { return new Zend\Db\Adapter\Adapter(array( 'driver' => 'pdo', 'dsn' => 'mysql:dbname=travis_test;host=localhost', 'database' => 'travis_test', 'username' => 'root', 'password' => '', 'hostname' => 'localhost', )); }, ), ), );
<?php return array( 'cdli-twostagesignup' => array( 'test_database_schema_up' => 'schema_up.mysql.sql', 'test_database_schema_down' => 'schema_down.sql', ), 'service_manager' => array( 'factories' => array( 'Zend\Db\Adapter\Adapter' => function ($sm) use ($dbParams) { return new Zend\Db\Adapter\Adapter(array( 'driver' => 'pdo', 'dsn' => 'mysql:dbname=travis_test;host=localhost', 'database' => 'travis_test', 'username' => 'root', 'password' => '', 'hostname' => 'localhost', )); }, ), ), );
Make bash setup script actually work Fixes call to exec and labels PS1 work as non working (over-ridden by bashrc).
#! /usr/bin/env python # Author: Joseph Lisee <jlisee@gmail.com> import os import sys # Get the current directory cur_dir, _ = os.path.split(__file__) def main(): # Get our path env_dir = os.path.abspath(os.path.join(cur_dir, 'env')) # Set our path vars env_paths = { 'PATH' : os.path.join(env_dir, 'bin'), 'LD_LIBRARY_PATH' : os.path.join(env_dir, 'lib'), } for varname, varpath in env_paths.iteritems(): cur_var = os.environ.get(varname, None) if cur_var: os.environ[varname] = cur_var + os.pathsep + varpath else: os.environ[varname] = varpath # Setup up the PS1 (this doesn't work) os.environ['PS1'] = '(xpm) \u@\h:\w\$' # Step into shell os.execvp('bash', ['bash']) if __name__ == '__main__': sys.exit(main())
#! /usr/bin/env python # Author: Joseph Lisee <jlisee@gmail.com> import os import sys # Get the current directory cur_dir, _ = os.path.split(__file__) def main(): # Get our path env_dir = os.path.abspath(os.path.join(cur_dir, 'env')) # Set our path vars env_paths = { 'PATH' : os.path.join(env_dir, 'bin'), 'LD_LIBRARY_PATH' : os.path.join(env_dir, 'lib'), } for varname, varpath in env_paths.iteritems(): cur_var = os.environ.get(varname, None) if cur_var: os.environ[varname] = '%s;%s' % (varpath, cur_var) else: os.environ[varname] = varpath # Setup up the PS1 os.environ['PS1'] = '(xpm) \u@\h:\w\$' # Step into shell print os.execvp('bash', ['bash']) if __name__ == '__main__': sys.exit(main())
Move libraries from LDFLAGS to LIBS for cryptography recipe Because this is how you are supposed to do it, you must use LDFLAGS for linker flags and LDLIBS (or the equivalent LOADLIBES) for the libraries
from pythonforandroid.recipe import CompiledComponentsPythonRecipe, Recipe class CryptographyRecipe(CompiledComponentsPythonRecipe): name = 'cryptography' version = '2.4.2' url = 'https://github.com/pyca/cryptography/archive/{version}.tar.gz' depends = ['openssl', 'idna', 'asn1crypto', 'six', 'setuptools', 'enum34', 'ipaddress', 'cffi'] call_hostpython_via_targetpython = False def get_recipe_env(self, arch): env = super(CryptographyRecipe, self).get_recipe_env(arch) openssl_recipe = Recipe.get_recipe('openssl', self.ctx) env['CFLAGS'] += openssl_recipe.include_flags(arch) env['LDFLAGS'] += openssl_recipe.link_dirs_flags(arch) env['LIBS'] = openssl_recipe.link_libs_flags() return env recipe = CryptographyRecipe()
from pythonforandroid.recipe import CompiledComponentsPythonRecipe, Recipe class CryptographyRecipe(CompiledComponentsPythonRecipe): name = 'cryptography' version = '2.4.2' url = 'https://github.com/pyca/cryptography/archive/{version}.tar.gz' depends = ['openssl', 'idna', 'asn1crypto', 'six', 'setuptools', 'enum34', 'ipaddress', 'cffi'] call_hostpython_via_targetpython = False def get_recipe_env(self, arch): env = super(CryptographyRecipe, self).get_recipe_env(arch) openssl_recipe = Recipe.get_recipe('openssl', self.ctx) env['CFLAGS'] += openssl_recipe.include_flags(arch) env['LDFLAGS'] += openssl_recipe.link_flags(arch) return env recipe = CryptographyRecipe()
Include nominator record and nominator's full name in GET household responses
<?php namespace App; use Illuminate\Database\Eloquent\Model; class Household extends Model { protected $table = "household"; protected $fillable = [ "nominator_user_id", "name_first", "name_middle", "name_last", "dob", "race", "gender", "email", "last4ssn", "preferred_contact_method" ]; protected $appends = [ 'nominator_name' ]; protected $hidden = [ 'hidden' ]; public function child() { return $this->hasMany("\App\Child"); } public function address() { return $this->hasMany("\App\HouseholdAddress"); } public function nominator() { return $this->belongsTo("\App\User", "nominator_user_id", "id"); } public function phone() { return $this->hasMany("\App\HouseholdPhone"); } public function getNominatorNameAttribute() { return $this->nominator->name_first . " " . $this->nominator->name_last; } } # Household::find(1)->children() # $household = Household::find(1); # $children = $household->children();
<?php namespace App; use Illuminate\Database\Eloquent\Model; class Household extends Model { protected $table = "household"; protected $fillable = [ "nominator_user_id", "name_first", "name_middle", "name_last", "dob", "race", "gender", "email", "last4ssn", "preferred_contact_method" ]; public function child() { return $this->hasMany("\App\Child"); } public function address() { return $this->hasMany("\App\HouseholdAddress"); } public function phone() { return $this->hasMany("\App\HouseholdPhone"); } } # Household::find(1)->children() # $household = Household::find(1); # $children = $household->children();
Fix bug with unresponsive switch due to new framework
import React from 'react'; import PropTypes from 'prop-types'; import * as action from '../../action/'; import Switch from '@material-ui/core/Switch'; import Typography from '@material-ui/core/Typography'; import { connect } from 'react-redux'; class SwitchResource extends React.Component { handleChange = (field, value) => { if (this.props.isWriteable === true) { this.props.toggleSwitch(this.props.objectID, this.props.instanceID, this.props.resource.resourceID, this.props.currentValue); } }; render () { return ( <div> <Typography variant='subheading' align='center'>{this.props.resource.name}</Typography> <Switch checked={this.props.currentValue} onChange={this.handleChange.bind(this, 'switch')} /> </div> ); } } SwitchResource.propTypes = { currentValue: PropTypes.any, instanceID: PropTypes.number, isWriteable: PropTypes.bool, objectID: PropTypes.number, resource: PropTypes.object, toggleSwitch: PropTypes.func.isRequired }; const mapStateToProps = (state) => { return { }; }; const mapDispatchToProps = (dispatch) => { return { toggleSwitch: (objectID, instanceID, resourceID, value) => dispatch(action.sendRequest(objectID, instanceID, resourceID, !value)) }; }; export default connect(mapStateToProps, mapDispatchToProps)(SwitchResource);
import React from 'react'; import PropTypes from 'prop-types'; import * as action from '../../action/'; import Switch from '@material-ui/core/Switch'; import Typography from '@material-ui/core/Typography'; import { connect } from 'react-redux'; class SwitchResource extends React.Component { handleChange = (field, value) => { if (this.props.isWriteable === true) { this.props.toggleSwitch(this.props.objectID, this.props.instanceID, this.props.resource.resourceID, value); } }; render () { return ( <div> <Typography variant='subheading' align='center'>{this.props.resource.name}</Typography> <Switch checked={this.props.currentValue} onChange={this.handleChange.bind(this, 'switch')} /> </div> ); } } SwitchResource.propTypes = { currentValue: PropTypes.any, instanceID: PropTypes.number, isWriteable: PropTypes.bool, objectID: PropTypes.number, resource: PropTypes.object, toggleSwitch: PropTypes.func.isRequired }; const mapStateToProps = (state) => { return { }; }; const mapDispatchToProps = (dispatch) => { return { toggleSwitch: (objectID, instanceID, resourceID, value) => dispatch(action.sendRequest(objectID, instanceID, resourceID, value)) }; }; export default connect(mapStateToProps, mapDispatchToProps)(SwitchResource);
Refactor Vert.x 3 event loop context to store context upon construction in order to prevent issues with calling runOnContext from a non-context thread.
/* * Copyright 2014 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.kuujo.copycat.vertx; import io.vertx.core.Context; import io.vertx.core.Vertx; import java.util.concurrent.Executor; /** * Vert.x execution context. * * @author <a href="http://github.com/kuujo">Jordan Halterman</a> */ public class VertxEventLoopExecutor implements Executor { private final Context context; public VertxEventLoopExecutor(Vertx vertx) { this.context = vertx.getOrCreateContext(); } @Override public void execute(Runnable command) { context.runOnContext(v -> command.run()); } }
/* * Copyright 2014 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.kuujo.copycat.vertx; import io.vertx.core.Vertx; import java.util.concurrent.Executor; /** * Vert.x execution context. * * @author <a href="http://github.com/kuujo">Jordan Halterman</a> */ public class VertxEventLoopExecutor implements Executor { private final Vertx vertx; public VertxEventLoopExecutor(Vertx vertx) { this.vertx = vertx; } @Override public void execute(Runnable command) { vertx.runOnContext(v -> command.run()); } }
Add a comment with a TODO
import re __all__ = ["recursively_remove_ltac"] LTAC_REG = re.compile(r'^\s*(?:Local\s+|Global\s+)?Ltac\s+([^\s]+)', re.MULTILINE) def recursively_remove_ltac(statements, exclude_n=3): """Removes any Ltac statement which is not used later in statements. Does not remove any code in the last exclude_n statements.""" # TODO(jgross): Figure out what the optimal exclude_n is. It's # probably 1 or 2, but I don't want to accidentally exclude the # line generating the bug, so I'm trying to be a bit safer rtn = list(reversed(statements))[:exclude_n] for statement in reversed(statements)[exclude_n:]: match = LTAC_REG.search(statement) if match: ltac_name = match.groups()[0] # search for the name of the tactic, by itself reg = re.compile('\b%s\b' % ltac_name, re.MULTILINE) if any(reg.search(other_statement) for other_statement in rtn): rtn.append(statement) else: rtn.append(statement) return list(reversed(rtn))
import re __all__ = ["recursively_remove_ltac"] LTAC_REG = re.compile(r'^\s*(?:Local\s+|Global\s+)?Ltac\s+([^\s]+)', re.MULTILINE) def recursively_remove_ltac(statements, exclude_n=3): """Removes any Ltac statement which is not used later in statements. Does not remove any code in the last exclude_n statements.""" rtn = list(reversed(statements))[:exclude_n] for statement in reversed(statements)[exclude_n:]: match = LTAC_REG.search(statement) if match: ltac_name = match.groups()[0] # search for the name of the tactic, by itself reg = re.compile('\b%s\b' % ltac_name, re.MULTILINE) if any(reg.search(other_statement) for other_statement in rtn): rtn.append(statement) else: rtn.append(statement) return list(reversed(rtn))
Enable spray to find sbt
import subprocess import sys import time import os def start(args, logfile, errfile): if os.name == 'nt': subprocess.check_call('"..\\sbt\\sbt.bat" assembly', shell=True, cwd="spray", stderr=errfile, stdout=logfile) else: subprocess.check_call("$FWROOT/sbt/sbt assembly", shell=True, cwd="spray", stderr=errfile, stdout=logfile) subprocess.Popen("java -jar target/scala-2.10/spray-benchmark-assembly-1.0.jar", cwd="spray", shell=True, stderr=errfile, stdout=logfile) return 0 def stop(logfile, errfile): if os.name == 'nt': subprocess.check_call("wmic process where \"CommandLine LIKE '%spray-benchmark%'\" call terminate", stderr=errfile, stdout=logfile) else: p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE) out, err = p.communicate() for line in out.splitlines(): if 'spray-benchmark' in line: try: pid = int(line.split(None, 2)[1]) os.kill(pid, 15) except OSError: pass return 0
import subprocess import sys import time import os def start(args, logfile, errfile): if os.name == 'nt': subprocess.check_call('"..\\sbt\\sbt.bat" assembly', shell=True, cwd="spray", stderr=errfile, stdout=logfile) else: subprocess.check_call("../sbt/sbt assembly", shell=True, cwd="spray", stderr=errfile, stdout=logfile) subprocess.Popen("java -jar target/scala-2.10/spray-benchmark-assembly-1.0.jar", cwd="spray", shell=True, stderr=errfile, stdout=logfile) time.sleep(5) return 0 def stop(logfile, errfile): if os.name == 'nt': subprocess.check_call("wmic process where \"CommandLine LIKE '%spray-benchmark%'\" call terminate", stderr=errfile, stdout=logfile) else: p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE) out, err = p.communicate() for line in out.splitlines(): if 'spray-benchmark' in line: try: pid = int(line.split(None, 2)[1]) os.kill(pid, 15) except OSError: pass return 0
Change thing.tasks.table_cleaner to delete TaskState objects for any invalid APIKeys
import datetime from celery import task from celery.utils.log import get_task_logger logger = get_task_logger(__name__) from django.db.models import Q from thing.models import APIKey, TaskState # --------------------------------------------------------------------------- # Periodic task to perform database table cleanup @task(name='thing.table_cleaner') def table_cleaner(): utcnow = datetime.datetime.utcnow() queued_timeout = utcnow - datetime.timedelta(minutes=120) # Build a QuerySet to find broken tasks taskstates = TaskState.objects.filter(state=TaskState.QUEUED_STATE, mod_time__lte=queued_timeout) for ts in taskstates: logger.warn('[table_cleaner] Stuck task: %d | %d | %s | %s', ts.id, ts.keyid, ts.parameter, ts.url) count = taskstates.update(mod_time=utcnow, next_time=utcnow, state=TaskState.READY_STATE) if count > 0: logger.warn('[table_cleaner] Reset %d broken task(s)', count) # Build a QuerySet to find tasks that refer to no longer existent keys taskstates = TaskState.objects.exclude( Q(keyid=-1) | Q(keyid__in=APIKey.objects.filter(valid=True).values('keyid')) ) taskstates.delete() # ---------------------------------------------------------------------------
import datetime from celery import task from celery.utils.log import get_task_logger logger = get_task_logger(__name__) from django.db.models import Q from thing.models import APIKey, TaskState # --------------------------------------------------------------------------- # Periodic task to perform database table cleanup @task(name='thing.table_cleaner') def table_cleaner(): utcnow = datetime.datetime.utcnow() queued_timeout = utcnow - datetime.timedelta(minutes=120) # Build a QuerySet to find broken tasks taskstates = TaskState.objects.filter(state=TaskState.QUEUED_STATE, mod_time__lte=queued_timeout) for ts in taskstates: logger.warn('[table_cleaner] Stuck task: %d | %d | %s | %s', ts.id, ts.keyid, ts.parameter, ts.url) count = taskstates.update(mod_time=utcnow, next_time=utcnow, state=TaskState.READY_STATE) if count > 0: logger.warn('[table_cleaner] Reset %d broken task(s)', count) # Build a QuerySet to find tasks that refer to no longer existent keys taskstates = TaskState.objects.exclude( Q(keyid=-1) | Q(keyid__in=APIKey.objects.values('keyid')) ) taskstates.delete() # ---------------------------------------------------------------------------
Add b option to string
# -*- coding: utf-8 -*- import os import unittest from testfixtures import TempDirectory from saltshaker.config import load_config from saltshaker.exceptions import ConfigurationError class TestConfig(unittest.TestCase): def setUp(self): self.tmp_dir = TempDirectory() def tearDown(self): self.tmp_dir.cleanup() def test_load_config_for_path_none(self): self.assertEqual(load_config(None), {}) def test_load_config_for_invalid_path(self): self.assertRaises(ConfigurationError, load_config, '/some/path') def test_load_config_for_valid_path(self): path = self._write_config_file('.shaker.yml', b'') self.assertEqual(load_config(path), None) def test_load_config_with_sources(self): path = self._write_config_file('.shaker.yml', b'sources:') self.assertEqual(load_config(path), { 'sources': None }) def _write_config_file(self, filename, filecontent): self.tmp_dir.write(filename, filecontent) return os.path.join(self.tmp_dir.path, filename)
# -*- coding: utf-8 -*- import os import unittest from testfixtures import TempDirectory from saltshaker.config import load_config from saltshaker.exceptions import ConfigurationError class TestConfig(unittest.TestCase): def setUp(self): self.tmp_dir = TempDirectory() def tearDown(self): self.tmp_dir.cleanup() def test_load_config_for_path_none(self): self.assertEqual(load_config(None), {}) def test_load_config_for_invalid_path(self): self.assertRaises(ConfigurationError, load_config, '/some/path') def test_load_config_for_valid_path(self): path = self._write_config_file('.shaker.yml', '') self.assertEqual(load_config(path), None) def test_load_config_with_sources(self): path = self._write_config_file('.shaker.yml', b'sources:') self.assertEqual(load_config(path), { 'sources': None }) def _write_config_file(self, filename, filecontent): self.tmp_dir.write(filename, filecontent) return os.path.join(self.tmp_dir.path, filename)
Fix writing strings in FileOutputBenchmark via FileLogState
package org.tinylog.benchmarks.file; import java.io.IOException; import java.nio.file.Path; import org.openjdk.jmh.annotations.Param; import org.openjdk.jmh.annotations.Scope; import org.openjdk.jmh.annotations.State; import org.tinylog.impl.writers.file.LogFile; /** * State for writing strings by using {@link LogFile}. */ @State(Scope.Thread) public class LogFileState extends AbstractState<LogFile> { /** * The buffer sizes to benchmark. */ @Param({ "1024", "2048", "4096", "8192", "16384", "32768", "65536", "131072" }) private int bufferSize; /** */ public LogFileState() { } /** * @param bufferSize The buffer size in bytes */ public LogFileState(int bufferSize) { this.bufferSize = bufferSize; } @Override public void write(String content) throws IOException { instance.write(content.getBytes(CHARSET)); } @Override protected LogFile create(Path path) throws IOException { return new LogFile(path.toString(), bufferSize, CHARSET, false); } }
package org.tinylog.benchmarks.file; import java.io.IOException; import java.nio.file.Path; import org.openjdk.jmh.annotations.Param; import org.openjdk.jmh.annotations.Scope; import org.openjdk.jmh.annotations.State; import org.tinylog.impl.writers.file.LogFile; /** * State for writing strings by using {@link LogFile}. */ @State(Scope.Thread) public class LogFileState extends AbstractState<LogFile> { /** * The buffer sizes to benchmark. */ @Param({ "1024", "2048", "4096", "8192", "16384", "32768", "65536", "131072" }) private int bufferSize; /** */ public LogFileState() { } /** * @param bufferSize The buffer size in bytes */ public LogFileState(int bufferSize) { this.bufferSize = bufferSize; } @Override public void write(String content) throws IOException { instance.write(content); } @Override protected LogFile create(Path path) throws IOException { return new LogFile(path.toString(), bufferSize, CHARSET, false); } }
Add warning message to tricky template tag
# -*- coding: utf-8 -*- from __future__ import print_function from __future__ import unicode_literals from __future__ import division import re from django import template from django.conf import settings register = template.Library() _remove_slash_re = re.compile(r'/+') def _urljoin(*args): """Joins relative URLs, collapsing consecutive '/'""" url = "/".join(args) return _remove_slash_re.sub('/', url) @register.filter def static_url(static_file): if settings.DEBUG: return _urljoin(settings.STATIC_URL, static_file) static_file_mapping = settings.STATIC_FILES_MAPPING if static_file not in static_file_mapping: # !!! WARNING !!! this may cause your templates to silently fail # If template A includes template B and template B has uses this # templatetag and results in this exception, template B will be # rendered blank inside of template A, instead of crashing. raise Exception('Static file %s not found in rev-manifest.json, ' 'did you forget to run "npm run build"?' % static_file) return _urljoin(settings.STATIC_URL, static_file_mapping[static_file])
# -*- coding: utf-8 -*- from __future__ import print_function from __future__ import unicode_literals from __future__ import division import re from django import template from django.conf import settings register = template.Library() _remove_slash_re = re.compile(r'/+') def _urljoin(*args): """Joins relative URLs, collapsing consecutive '/'""" url = "/".join(args) return _remove_slash_re.sub('/', url) @register.filter def static_url(static_file): if settings.DEBUG: return _urljoin(settings.STATIC_URL, static_file) static_file_mapping = settings.STATIC_FILES_MAPPING if static_file not in static_file_mapping: raise Exception('Static file %s not found in rev-manifest.json, ' 'did you forget to run "npm run build"?' % static_file) return _urljoin(settings.STATIC_URL, static_file_mapping[static_file])
Update the rest connector config
var asteroid = require("asteroid"); module.exports = asteroid.createDataSource({ connector: require("asteroid-connector-rest"), debug: false, operations: [ { template: { "method": "GET", "url": "http://maps.googleapis.com/maps/api/geocode/{format=json}", "headers": { "accepts": "application/json", "content-type": "application/json" }, "query": { "address": "{street},{city},{zipcode}", "sensor": "{sensor=false}" }, "responsePath": "$.results[0].geometry.location" }, functions: { "geocode": ["street", "city", "zipcode"] } } ]});
var asteroid = require('asteroid'); module.exports = asteroid.createDataSource({ connector: require('asteroid-connector-rest'), debug: false, operations: [ { name: 'geocode', parameters: ['street', 'city', 'zipcode'], request: { "method": "GET", "url": "http://maps.googleapis.com/maps/api/geocode/{format=json}", "headers": { "accepts": "application/json", "content-type": "application/json" }, "query": { "address": "{street},{city},{zipcode}", "sensor": "{sensor=false}" }, "responsePath": "$.results[0].geometry.location" } } ]});
Fix indexing in active tickets
import { Template } from 'meteor/templating'; import '/imports/ui/components/ticket/ticket.js'; import './queue-table.html'; Template.QueueTable.helpers({ noActiveTickets(queue) { return queue.activeTickets().count() === 0; }, rows(queue) { let index = 1; const rows = []; const tickets = queue.tickets().fetch(); tickets.forEach((ticket) => { if (ticket.isActive()) { ticket.index = index; // eslint-disable-line no-param-reassign rows.push(ticket); index += 1; } if (queue.isCutoff() && (ticket._id === queue.cutoffAfter)) { rows.push({ isCutoffMarker: true, }); } }); return rows; }, });
import { Template } from 'meteor/templating'; import '/imports/ui/components/ticket/ticket.js'; import './queue-table.html'; Template.QueueTable.helpers({ noActiveTickets(queue) { return queue.activeTickets().count() === 0; }, rows(queue) { const rows = []; const tickets = queue.tickets().fetch(); tickets.forEach((ticket, i) => { if (ticket.isActive()) { ticket.index = i + 1; // eslint-disable-line no-param-reassign rows.push(ticket); } if (queue.isCutoff() && (ticket._id === queue.cutoffAfter)) { rows.push({ isCutoffMarker: true, }); } }); return rows; }, });
Increase wait time to 1 second
var net = require('net'); utils = require('radiodan-client').utils, logger = utils.logger(__filename); function create(port, socket) { var deferred = utils.promise.defer(), socket = socket || new net.Socket(), timeout = 1000; socket.setTimeout(2500); function addHandlers() { socket.on('connect', handleConnect); socket.on('error', handleError); } function handleConnect() { deferred.resolve(port); } function handleError() { logger.warn('Cannot connect'); setTimeout(function () { socket.connect(port); }, timeout); } function connect() { logger.debug('Connecting to port', port); addHandlers(); socket.connect(port); return deferred.promise; } return { _addHandlers: addHandlers, connect: connect }; } module.exports = { create: create };
var net = require('net'); utils = require('radiodan-client').utils, logger = utils.logger(__filename); function create(port, socket) { var deferred = utils.promise.defer(), socket = socket || new net.Socket(), timeout = 10; socket.setTimeout(2500); function addHandlers() { socket.on('connect', handleConnect); socket.on('error', handleError); } function handleConnect() { deferred.resolve(port); } function handleError() { logger.warn('Cannot connect'); setTimeout(function () { socket.connect(port); }, timeout); } function connect() { logger.debug('Connecting to port', port); addHandlers(); socket.connect(port); return deferred.promise; } return { _addHandlers: addHandlers, connect: connect }; } module.exports = { create: create };
Set circle to follow the marker
$(document).ready(function () { var center = new google.maps.LatLng(35.681298, 139.766247); var mapOptions = { zoom: 12, center: center }; var map = new google.maps.Map(document.getElementById('map'), mapOptions); var marker = new google.maps.Marker({ position: map.getCenter(), draggable: true, map: map }); marker.addListener("drag", function() { circle.setCenter(marker.getPosition()); }); var geocoder = new google.maps.Geocoder(); var circle = new google.maps.Circle({ strokeColor: '#FF0000', strokeOpacity: 0.8, strokeWeight: 2, fillColor: '#FF0000', fillOpacity: 0.35, map: map, center: center, radius: 3000 }); $("#search").click(function(){ var address = $("#address").val(); geocoder.geocode({ 'address': address }, function (results, status) { if (status === google.maps.GeocoderStatus.OK) { var loc = results[0].geometry.location; map.setCenter(loc); marker.setPosition(loc); circle.setCenter(loc); $("#latitude").val(loc.lat()); $("#longitude").val(loc.lng()); } }); }); });
$(document).ready(function () { var center = new google.maps.LatLng(35.681298, 139.766247); var mapOptions = { zoom: 12, center: center }; var map = new google.maps.Map(document.getElementById('map'), mapOptions); var marker = new google.maps.Marker({ position: map.getCenter(), draggable: true, map: map }); var geocoder = new google.maps.Geocoder(); var circle = new google.maps.Circle({ strokeColor: '#FF0000', strokeOpacity: 0.8, strokeWeight: 2, fillColor: '#FF0000', fillOpacity: 0.35, map: map, center: center, radius: 3000 }); $("#search").click(function(){ var address = $("#address").val(); geocoder.geocode({ 'address': address }, function (results, status) { if (status === google.maps.GeocoderStatus.OK) { var loc = results[0].geometry.location; map.setCenter(loc); marker.setPosition(loc); circle.setCenter(loc); $("#latitude").val(loc.lat()); $("#longitude").val(loc.lng()); } }); }); });
Make sure encode chunk size is in multiples of three bytes
'use strict'; const applyDefaultOpts = opts => Object.assign({}, { encoding: 'utf8', chunkSize: 3 }, opts); const b64 = () => { }; b64.encode = (input, opts) => new Promise(resolve => { opts = applyDefaultOpts(opts); if (!(input instanceof Buffer)) { input = Buffer.from(input, opts.encoding); } const chunkMultiple = 3; opts.chunkSize = Math.max(chunkMultiple, (Math.ceil(opts.chunkSize / chunkMultiple) * chunkMultiple)); const bufferLength = input.length; let currentIndex = 0; let output = ''; setImmediate(function encodeChunk() { const chunk = input.slice(currentIndex, currentIndex + opts.chunkSize); output += chunk.toString('base64'); currentIndex += opts.chunkSize; if (currentIndex < bufferLength) { setImmediate(encodeChunk); } else { resolve(output); } }); }); module.exports = b64;
'use strict'; const applyDefaultOpts = opts => Object.assign({}, { encoding: 'utf8', chunkSize: 3 }, opts); const b64 = () => { }; b64.encode = (input, opts) => new Promise(resolve => { opts = applyDefaultOpts(opts); if (!(input instanceof Buffer)) { input = Buffer.from(input, opts.encoding); } const bufferLength = input.length; let currentIndex = 0; let output = ''; setImmediate(function encodeChunk() { const chunk = input.slice(currentIndex, currentIndex + opts.chunkSize); output += chunk.toString('base64'); currentIndex += opts.chunkSize; if (currentIndex < bufferLength) { setImmediate(encodeChunk); } else { resolve(output); } }); }); module.exports = b64;
Rename the signed_div test case, and ... disable the multithreaded test case.
import nose import angr import subprocess import logging l = logging.getLogger('angr.tests.test_signed_div') import os test_location = str(os.path.dirname(os.path.realpath(__file__))) def run_signed_div(): test_bin = os.path.join(test_location, "../../binaries-private/tests/i386/test_signed_div") b = angr.Project(test_bin) pg = b.factory.path_group() pg.explore() out_angr = pg.deadended[0].state.posix.dumps(1) proc = subprocess.Popen(test_bin, stdout=subprocess.PIPE) stdout_real, _ = proc.communicate() nose.tools.assert_equal(out_angr, stdout_real) def test_signed_div(): yield run_signed_div if __name__ == "__main__": run_signed_div()
import nose import angr import subprocess import logging l = logging.getLogger('angr.tests.test_signed_div') import os test_location = str(os.path.dirname(os.path.realpath(__file__))) def run_strtol(threads): test_bin = os.path.join(test_location, "../../binaries-private/tests/i386/test_signed_div") b = angr.Project(test_bin) pg = b.factory.path_group() pg.explore() out_angr = pg.deadended[0].state.posix.dumps(1) proc = subprocess.Popen(test_bin, stdout=subprocess.PIPE) stdout_real, _ = proc.communicate() nose.tools.assert_equal(out_angr, stdout_real) def test_strtol(): yield run_strtol, None yield run_strtol, 8 if __name__ == "__main__": run_strtol(4)
Correct complex integrator for scalar equations
#!/usr/bin/env python # encoding: utf-8 from __future__ import division, print_function import numpy as np from scipy.integrate import ode def zodeint(func, y0, t, **kwargs): """Simple wraper around scipy.integrate.ode for complex valued problems. :param func: Right hand side of the equation dy/dt = f(t, y) :param y0: Initial value at t = t[0] :param t: Sequence of time points for whihc to solve for y :returns: y[len(t), len(y0)] """ y0 = np.array([y0]) if np.isscalar(y0) else y0 integrator = ode(func) \ .set_integrator('zvode', with_jacobian=False, **kwargs) \ .set_initial_value(y0) y = np.empty((len(t), len(y0)), dtype=complex) y[0] = y0 for i in xrange(1, len(t)): integrator.integrate(t[i]) if not integrator.successful(): print('WARNING: Integrator failed') break y[i] = integrator.y return t[:i+1], y[:i+1]
#!/usr/bin/env python # encoding: utf-8 from __future__ import division, print_function import numpy as np from scipy.integrate import ode def zodeint(func, y0, t): """Simple wraper around scipy.integrate.ode for complex valued problems. :param func: Right hand side of the equation dy/dt = f(t, y) :param y0: Initial value at t = t[0] :param t: Sequence of time points for whihc to solve for y :returns: y[len(t), len(y0)] """ integrator = ode(func) \ .set_integrator('zvode', with_jacobian=False) \ .set_initial_value(y0) y = np.empty((len(t), len(y0)), dtype=complex) y[0] = y0 for i in xrange(1, len(t)): integrator.integrate(t[i]) if not integrator.successful(): print('WARNING: Integrator failed') break y[i] = integrator.y return t[:i+1], y[:i+1]
Remove commented code from exception
<?php /** * Mailgun plugin for CakePHP 3 * Copyright (c) Narendra Vaghela (http://www.narendravaghela.com) * * Licensed under The MIT License * For full copyright and license information, please see the LICENSE.md * Redistributions of files must retain the above copyright notice. * * @copyright Narendra Vaghela (http://www.narendravaghela.com) * @link https://github.com/narendravaghela/cakephp-mailgun * @license http://www.opensource.org/licenses/mit-license.php MIT License */ namespace MailgunEmail\Mailer\Exception; use Cake\Core\Exception\Exception; /** * Missing Credentials exception - used when API Key or Domain is not set. */ class MissingCredentialsException extends Exception { /** * {@inheritDoc} */ protected $_messageTemplate = 'Mailgun %s is not set.'; }
<?php /** * Mailgun plugin for CakePHP 3 * Copyright (c) Narendra Vaghela (http://www.narendravaghela.com) * * Licensed under The MIT License * For full copyright and license information, please see the LICENSE.md * Redistributions of files must retain the above copyright notice. * * @copyright Narendra Vaghela (http://www.narendravaghela.com) * @link https://github.com/narendravaghela/cakephp-mailgun * @license http://www.opensource.org/licenses/mit-license.php MIT License */ namespace MailgunEmail\Mailer\Exception; use Cake\Core\Exception\Exception; /** * Missing Credentials exception - used when API Key or Domain is not set. */ class MissingCredentialsException extends Exception { /** * {@inheritDoc} */ protected $_messageTemplate = 'Mailgun %s is not set.'; /** * {@inheritDoc} */ // public function __construct($message, $code = 404) // { // parent::__construct($message, $code); // } }
chore: Update classifiers to include python 3.10
import os from setuptools import setup, find_packages def read(fname): with open(os.path.join(os.path.dirname(__file__), fname)) as f: return f.read() setup( name='pynubank', version='2.17.0', url='https://github.com/andreroggeri/pynubank', author='André Roggeri Campos', author_email='a.roggeri.c@gmail.com', license='MIT', packages=find_packages(), package_data={'pynubank': ['queries/*.gql', 'utils/mocked_responses/*.json']}, install_requires=['requests', 'qrcode', 'pyOpenSSL', 'colorama', 'requests-pkcs12'], setup_requires=['pytest-runner'], long_description=read("README.md"), long_description_content_type="text/markdown", entry_points={ 'console_scripts': [ 'pynubank = pynubank.cli:main' ] }, classifiers=[ 'Programming Language :: Python', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', 'Programming Language :: Python :: 3.10', ] )
import os from setuptools import setup, find_packages def read(fname): with open(os.path.join(os.path.dirname(__file__), fname)) as f: return f.read() setup( name='pynubank', version='2.17.0', url='https://github.com/andreroggeri/pynubank', author='André Roggeri Campos', author_email='a.roggeri.c@gmail.com', license='MIT', packages=find_packages(), package_data={'pynubank': ['queries/*.gql', 'utils/mocked_responses/*.json']}, install_requires=['requests', 'qrcode', 'pyOpenSSL', 'colorama', 'requests-pkcs12'], setup_requires=['pytest-runner'], long_description=read("README.md"), long_description_content_type="text/markdown", entry_points={ 'console_scripts': [ 'pynubank = pynubank.cli:main' ] }, classifiers=[ 'Programming Language :: Python', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', ] )
Modify schema to include a category for each emoticon
const express = require('express'), app = express(), bodyParser = require('body-parser'), methodOverride = require('method-override'), mongoose = require('mongoose'); mongoose.connect('mongodb://localhost/mojibox'); app.set('view engine', 'pug'); app.use(express.static(__dirname + '/public')); app.use(bodyParser.urlencoded({extended: true})); app.use(methodOverride('_method')); if (app.get('env') === 'development') { app.use(function(err, req, res, next) { res.status(err.status || 500); res.render('error', { message: err.message, error: err }); }); } const emoticonSchema = new mongoose.Schema({ content: String, category: String }); const Emoticon = mongoose.model('Emoticon', emoticonSchema); // '/' app.get('/', function(req, res, next) { res.render('index'); // res.redirect('/collection'); }); // '/collection' app.get('/collection', function(req, res, next) { res.render('show', {collection}); }); // '/add' app.post('/add', function(req, res, next) { collection.push(); }); // '/delete' app.delete('/collection/:id', function(req, res, next) { }); app.listen(3000, function() { console.log('Server is listening on port 3000'); });
const express = require('express'), app = express(), bodyParser = require('body-parser'), methodOverride = require('method-override'), mongoose = require('mongoose'); mongoose.connect('mongodb://localhost/mojibox'); app.set('view engine', 'pug'); app.use(express.static(__dirname + '/public')); app.use(bodyParser.urlencoded({extended: true})); app.use(methodOverride('_method')); if (app.get('env') === 'development') { app.use(function(err, req, res, next) { res.status(err.status || 500); res.render('error', { message: err.message, error: err }); }); } const emoticonSchema = new mongoose.Schema({ content: String }); const Emoticon = mongoose.model('Emoticon', emoticonSchema); // '/' app.get('/', function(req, res, next) { res.render('index'); // res.redirect('/collection'); }); // '/collection' app.get('/collection', function(req, res, next) { res.render('show', {collection}); }); // '/add' app.post('/add', function(req, res, next) { collection.push(); }); // '/delete' app.delete('/collection/:id', function(req, res, next) { }); app.listen(3000, function() { console.log('Server is listening on port 3000'); });
Fix check on error messages.
/* * grunt-check-clean * https://github.com/the-software-factory/grunt-check-clean * * Copyright (c) 2015 Stéphane Bisinger * Licensed under the MIT license. */ 'use strict'; module.exports = function(grunt) { // Please see the Grunt documentation for more information regarding task // creation: http://gruntjs.com/creating-tasks grunt.registerTask('check_clean', 'Ensure the git index is clean and that there are no untracked files or directories.', function() { var done = this.async(); grunt.util.spawn({ cmd: 'git', args: ['status', '--porcelain'] }, function(error, result) { var ret = 0; if (error || result.stdout.length > 0) { console.log(error); console.log(result.stdout); ret = new Error("The git index is not clean. Ensure there are no uncommitted changes or untracked files."); } done(ret); }); }); };
/* * grunt-check-clean * https://github.com/the-software-factory/grunt-check-clean * * Copyright (c) 2015 Stéphane Bisinger * Licensed under the MIT license. */ 'use strict'; module.exports = function(grunt) { // Please see the Grunt documentation for more information regarding task // creation: http://gruntjs.com/creating-tasks grunt.registerTask('check_clean', 'Ensure the git index is clean and that there are no untracked files or directories.', function() { var done = this.async(); grunt.util.spawn({ cmd: 'git', args: ['status', '--porcelain'] }, function(error, result) { var ret = 0; if (error !== 0 || result.stdout.length > 0) { ret = new Error("The git index is not clean. Ensure there are no uncommitted changes or untracked files."); } done(ret); }); }); };
Add servlet example for external redirection
package de.asw.courses.web2016.demo.backend.servlet.redir; import java.io.IOException; import javax.servlet.ServletException; import javax.servlet.annotation.WebServlet; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; /** * {@code http://localhost:8080/asw2016-demo-webapp/redirect?redirect_uri=http://www.google.de } * @author tom * */ @WebServlet("/redirect") public class ExternalRedirectionExampleServlet extends HttpServlet{ @Override protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { String redirectUri = req.getParameter("redirect_uri"); resp.sendRedirect(redirectUri); // sends a 302 Temporary Redirect } }
package de.asw.courses.web2016.demo.backend.servlet.redir; import java.io.IOException; import javax.servlet.ServletException; import javax.servlet.annotation.WebServlet; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; /** * {@code http://localhost:8080/asw2016-demo-webapp/redirect?redirect_uri=http://www.google.de } * @author tom * */ @WebServlet("/redirect") public class ExternalRedirectionExampleServlet extends HttpServlet{ @Override protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { String redirectUri = req.getParameter("redirect_uri"); resp.sendRedirect(redirectUri); // sends a 302 Temporary Redirect } }
Fix translation sanitizing to work for plurals.
var through = require( 'through2' ); var gutil = require( 'gulp-util' ); var PluginError = gutil.PluginError; var sanitize = require( 'sanitize-html' ); const PLUGIN_NAME = 'gulp-sanitize-translations.js'; module.exports = function( options ) { // Creating a stream through which each file will pass var stream = through.obj( function( file, enc, callback ) { if ( file.isBuffer() ) { var content = file.contents.toString(); var parsed = JSON.parse( content ); var lang = Object.keys( parsed )[0]; for ( var i in parsed[ lang ] ) { if ( Array.isArray( parsed[ lang ][ i ] ) ) { for ( var n in parsed[ lang ][ i ] ) { parsed[ lang ][ i ][ n ] = sanitize( parsed[ lang ][ i ][ n ] ); } } else { parsed[ lang ][ i ] = sanitize( parsed[ lang ][ i ] ); } } file.contents = new Buffer( JSON.stringify( parsed ), 'utf-8' ); } // Streams not supported. else if ( file.isStream() ) { this.emit( 'error', new gutil.PluginError( PLUGIN_NAME, 'Streaming not supported.' ) ); return callback(); } // Anything else just falls through. this.push( file ); return callback(); } ); // returning the file stream return stream; };
var through = require( 'through2' ); var gutil = require( 'gulp-util' ); var PluginError = gutil.PluginError; var sanitize = require( 'sanitize-html' ); const PLUGIN_NAME = 'gulp-sanitize-translations.js'; module.exports = function( options ) { // Creating a stream through which each file will pass var stream = through.obj( function( file, enc, callback ) { if ( file.isBuffer() ) { var content = file.contents.toString(); var parsed = JSON.parse( content ); var lang = Object.keys( parsed )[0]; for ( var i in parsed[ lang ] ) { parsed[ lang ][ i ] = sanitize( parsed[ lang ][ i ] ); } file.contents = new Buffer( JSON.stringify( parsed ), 'utf-8' ); } // Streams not supported. else if ( file.isStream() ) { this.emit( 'error', new gutil.PluginError( PLUGIN_NAME, 'Streaming not supported.' ) ); return callback(); } // Anything else just falls through. this.push( file ); return callback(); } ); // returning the file stream return stream; };
Change server port to be node convention
/* eslint-disable import/no-extraneous-dependencies */ const path = require('path'); const express = require('express'); const webpack = require('webpack'); const config = require('../webpack.config.dev'); const webpackDev = require('webpack-dev-middleware'); const webpackHot = require('webpack-hot-middleware'); const app = express(); const compiler = webpack(config); const PORT = process.env.PORT || 4000; app.use('/', express.static(path.join(__dirname, '..', 'examples'))); app.use(webpackDev(compiler, { noInfo: true, publicPath: config.output.publicPath, })); app.use(webpackHot(compiler)); app.get('/', (req, res) => { res.sendFile(path.join(__dirname, '..', 'examples/index.html')); }); app.listen(PORT, 'localhost', () => { process.stdout.write(`Listening at http://localhost:${PORT}\n`); });
/* eslint-disable import/no-extraneous-dependencies */ const path = require('path'); const express = require('express'); const webpack = require('webpack'); const config = require('../webpack.config.dev'); const webpackDev = require('webpack-dev-middleware'); const webpackHot = require('webpack-hot-middleware'); const app = express(); const compiler = webpack(config); const PORT = process.env.PORT || 3000; app.use('/', express.static(path.join(__dirname, '..', 'examples'))); app.use(webpackDev(compiler, { noInfo: true, publicPath: config.output.publicPath, })); app.use(webpackHot(compiler)); app.get('/', (req, res) => { res.sendFile(path.join(__dirname, '..', 'examples/index.html')); }); app.listen(PORT, 'localhost', () => { process.stdout.write(`Listening at http://localhost:${PORT}\n`); });
Add argument checks to pass tests
package com.grayben.riskExtractor.headingMarker.nominator; import java.util.Collections; import java.util.List; import com.grayben.riskExtractor.headingMarker.UnmodifiableText; public class NominatedText extends UnmodifiableText implements NomineesRetrievable { private List<Integer> nominees; public NominatedText(List<String> stringList, List<Integer> nominees) { super(stringList); if (nominees == null) { throw new NullPointerException("Attempted to pass null argument"); } this.nominees = nominees; } public NominatedText( UnmodifiableText unmodifiableText, List<Integer> nominees){ super(unmodifiableText); if (nominees == null) { throw new NullPointerException("Attempted to pass null argument"); } this.nominees = nominees; } public NominatedText(NominatedText nominatedText){ this(nominatedText, nominatedText.getNominees()); } @Override public List<Integer> getNominees() { return Collections.unmodifiableList(this.nominees); } }
package com.grayben.riskExtractor.headingMarker.nominator; import java.util.List; import com.grayben.riskExtractor.headingMarker.UnmodifiableText; public class NominatedText extends UnmodifiableText implements NomineesRetrievable { private List<Integer> nominees; public NominatedText(List<String> stringList, List<Integer> nominees) { super(stringList); this.nominees = nominees; } public NominatedText( UnmodifiableText unmodifiableText, List<Integer> nominees){ super(unmodifiableText); this.nominees = nominees; } public NominatedText(NominatedText nominatedText){ this(nominatedText, nominatedText.getNominees()); } @Override public List<Integer> getNominees() { return nominees; } }
Change additions from green to blue for accessibility it was mentioned on irc.perl.org#metacpan that red/green is bad for color blindness and the blue seems a bigger contrast.
;(function() { // CommonJS typeof(require) != 'undefined' ? SyntaxHighlighter = require('shCore').SyntaxHighlighter : null; function Brush() { this.regexList = [ { regex: /^\+\+\+ .*$/gm, css: 'color2' }, // new file { regex: /^\-\-\- .*$/gm, css: 'color2' }, // old file { regex: /^\s.*$/gm, css: 'color1' }, // unchanged { regex: /^@@.*@@.*$/gm, css: 'variable' }, // location { regex: /^\+.*$/gm, css: 'string' }, // additions { regex: /^\-.*$/gm, css: 'color3' } // deletions ]; }; Brush.prototype = new SyntaxHighlighter.Highlighter(); Brush.aliases = ['diff', 'patch']; SyntaxHighlighter.brushes.Diff = Brush; // CommonJS typeof(exports) != 'undefined' ? exports.Brush = Brush : null; })();
;(function() { // CommonJS typeof(require) != 'undefined' ? SyntaxHighlighter = require('shCore').SyntaxHighlighter : null; function Brush() { this.regexList = [ { regex: /^\+\+\+ .*$/gm, css: 'color2' }, // new file { regex: /^\-\-\- .*$/gm, css: 'color2' }, // old file { regex: /^\s.*$/gm, css: 'color1' }, // unchanged { regex: /^@@.*@@.*$/gm, css: 'variable' }, // location { regex: /^\+.*$/gm, css: 'comments' }, // additions { regex: /^\-.*$/gm, css: 'color3' } // deletions ]; }; Brush.prototype = new SyntaxHighlighter.Highlighter(); Brush.aliases = ['diff', 'patch']; SyntaxHighlighter.brushes.Diff = Brush; // CommonJS typeof(exports) != 'undefined' ? exports.Brush = Brush : null; })();
Make preview URLs include trailing slash when slug is given - the trailing slash isn't shown usually - slash added when a slug is given to be more correct
/* Example usage: {{gh-url-preview prefix="tag" slug=theSlugValue tagName="p" classNames="description"}} */ var urlPreview = Ember.Component.extend({ classNames: 'ghost-url-preview', prefix: null, slug: null, theUrl: null, generateUrl: function () { // Get the blog URL and strip the scheme var blogUrl = this.get('config').blogUrl, noSchemeBlogUrl = blogUrl.substr(blogUrl.indexOf('://') + 3), // Remove `http[s]://` // Get the prefix and slug values prefix = this.get('prefix') ? this.get('prefix') + '/' : '', slug = this.get('slug') ? this.get('slug') : '', // Join parts of the URL together with slashes theUrl = noSchemeBlogUrl + '/' + prefix + (slug ? slug + '/' : ''); this.set('the-url', theUrl); }.on('didInsertElement').observes('slug') }); export default urlPreview;
/* Example usage: {{gh-url-preview prefix="tag" slug=theSlugValue tagName="p" classNames="description"}} */ var urlPreview = Ember.Component.extend({ classNames: 'ghost-url-preview', prefix: null, slug: null, theUrl: null, generateUrl: function () { // Get the blog URL and strip the scheme var blogUrl = this.get('config').blogUrl, noSchemeBlogUrl = blogUrl.substr(blogUrl.indexOf('://') + 3), // Remove `http[s]://` // Get the prefix and slug values prefix = this.get('prefix') ? this.get('prefix') + '/' : '', slug = this.get('slug') ? this.get('slug') : '', // Join parts of the URL together with slashes theUrl = noSchemeBlogUrl + '/' + prefix + slug; this.set('the-url', theUrl); }.on('didInsertElement').observes('slug') }); export default urlPreview;
Exclude the test/ directory & files from being installed as a package.
"""setup.py file.""" import uuid from setuptools import setup, find_packages from pip.req import parse_requirements __author__ = 'David Barroso <dbarrosop@dravetech.com>' install_reqs = parse_requirements('requirements.txt', session=uuid.uuid1()) reqs = [str(ir.req) for ir in install_reqs] setup( name="napalm-nxos", version="0.5.2", packages=find_packages(exclude=["test", "test.*"]), author="David Barroso", author_email="dbarrosop@dravetech.com", description="Network Automation and Programmability Abstraction Layer with Multivendor support", classifiers=[ 'Topic :: Utilities', 'Programming Language :: Python', 'Operating System :: POSIX :: Linux', 'Operating System :: MacOS', ], url="https://github.com/napalm-automation/napalm-nxos", include_package_data=True, install_requires=reqs, )
"""setup.py file.""" import uuid from setuptools import setup, find_packages from pip.req import parse_requirements __author__ = 'David Barroso <dbarrosop@dravetech.com>' install_reqs = parse_requirements('requirements.txt', session=uuid.uuid1()) reqs = [str(ir.req) for ir in install_reqs] setup( name="napalm-nxos", version="0.5.2", packages=find_packages(), author="David Barroso", author_email="dbarrosop@dravetech.com", description="Network Automation and Programmability Abstraction Layer with Multivendor support", classifiers=[ 'Topic :: Utilities', 'Programming Language :: Python', 'Operating System :: POSIX :: Linux', 'Operating System :: MacOS', ], url="https://github.com/napalm-automation/napalm-nxos", include_package_data=True, install_requires=reqs, )
Allow singleton conf['part'] for strconcat
# pipestrconcat.py #aka stringbuilder # from pipe2py import util def pipe_strconcat(context, _INPUT, conf, **kwargs): """This source builds a string. Keyword arguments: context -- pipeline context _INPUT -- source generator conf: part -- parts Yields (_OUTPUT): string """ if not isinstance(conf['part'], list): #todo do we need to do this anywhere else? conf['part'] = [conf['part']] for item in _INPUT: s = "" for part in conf['part']: if "subkey" in part: s += item[part['subkey']] #todo: use this subkey check anywhere we can embed a module else: s += util.get_value(part, kwargs) yield s
# pipestrconcat.py #aka stringbuilder # from pipe2py import util def pipe_strconcat(context, _INPUT, conf, **kwargs): """This source builds a string. Keyword arguments: context -- pipeline context _INPUT -- source generator conf: part -- parts Yields (_OUTPUT): string """ for item in _INPUT: s = "" for part in conf['part']: if "subkey" in part: s += item[part['subkey']] #todo: use this subkey check anywhere we can embed a module else: s += util.get_value(part, kwargs) yield s
Allow only 2s timeout for PhantomJS
/* * Usage: phantomjs capture.js [width] [height] [url] [output] */ var system = require('system'); var args = system.args; if (args.length === 5) { var width = args[1]; var height = args[2]; var url = args[3]; var output = args[4]; var page = require('webpage').create(); page.viewportSize = { width: width, height: height }; page.settings.resourceTimeout = 2000; page.open(url, function() { page.evaluate(function() { var style = document.createElement('style'); style.innerHTML = [ '* {', 'animation: none !important;', 'transition: none !important;', '-webkit-animation: none !important;', '-webkit-transition: none !important;', '}'].join('\n'); document.body.appendChild(style); }); page.render(output); phantom.exit(); }); } else { console.log("Invalid argument!"); }
/* * Usage: phantomjs capture.js [width] [height] [url] [output] */ var system = require('system'); var args = system.args; if (args.length === 5) { var width = args[1]; var height = args[2]; var url = args[3]; var output = args[4]; var page = require('webpage').create(); page.viewportSize = { width: width, height: height }; page.open(url, function() { page.evaluate(function() { var style = document.createElement('style'); style.innerHTML = [ '* {', 'animation: none !important;', 'transition: none !important;', '-webkit-animation: none !important;', '-webkit-transition: none !important;', '}'].join('\n'); document.body.appendChild(style); }); page.render(output); phantom.exit(); }); } else { console.log("Invalid argument!"); }
Change order-details path and Order includes
(function(){ 'use strict'; angular.module('app.states') .run(appRun); /** @ngInject */ function appRun(routerHelper) { routerHelper.configureStates(getStates()); } function getStates() { return { 'orders.details': { url: '/details/:orderId', templateUrl: 'app/states/orders/details/details.html', controller: StateController, controllerAs: 'vm', title: 'Order Details', resolve: { order: resolveOrder } } }; } /** @ngInject */ function resolveOrder($stateParams, Order){ return Order.get({ id: $stateParams.orderId, 'includes[]': ['product', 'project', 'service'] }).$promise; } /** @ngInject */ function StateController(order) { var vm = this; vm.order = order; vm.activate = activate; function activate() { } } })();
(function(){ 'use strict'; angular.module('app.states') .run(appRun); /** @ngInject */ function appRun(routerHelper) { routerHelper.configureStates(getStates()); } function getStates() { return { 'orders.details': { url: '/:orderId', templateUrl: 'app/states/orders/details/details.html', controller: StateController, controllerAs: 'vm', title: 'Order Details', resolve: { order: resolveOrder } } }; } /** @ngInject */ function resolveOrder($stateParams, Order){ return Order.get({ id: $stateParams.orderId, 'includes[]': ['project'] }).$promise; } function StateController($state, lodash, order) { var vm = this; vm.order = order; vm.activate = activate; function activate() { } } })();
Support different platforms and architectures.
var gulp = require('gulp'), babel = require('gulp-babel'), runSequence = require('run-sequence'), rename = require('gulp-rename'), electron = require('gulp-atom-electron'), del = require('del'); gulp.task('transpile:app', function() { return gulp.src('main/index.es6.js') .pipe(babel()) .pipe(rename('index.js')) .pipe(gulp.dest('main')); }); gulp.task('clean', function(){ return del('package', {force: true}); }); gulp.task('copy:app', ['clean'], function(){ return gulp.src(['main/**/*', 'browser/**/*', 'package.json'], {base: '.'}) .pipe(gulp.dest('package')); }); gulp.task('build', function() { return gulp.src('package/**') .pipe(electron( { version: '0.30.3', platform: process.platform, arch: process.arch })) .pipe(electron.zfsdest('dist/es6-ng-electron.zip')); }); gulp.task('default', function(){ return runSequence('clean', 'transpile:app', 'copy:app','build'); });
var gulp = require('gulp'), babel = require('gulp-babel'), runSequence = require('run-sequence'), rename = require('gulp-rename'), electron = require('gulp-atom-electron'), del = require('del'); gulp.task('transpile:app', function() { return gulp.src('main/index.es6.js') .pipe(babel()) .pipe(rename('index.js')) .pipe(gulp.dest('main')); }); gulp.task('clean', function(){ return del('package', {force: true}); }); gulp.task('copy:app', ['clean'], function(){ return gulp.src(['main/**/*', 'browser/**/*', 'package.json'], {base: '.'}) .pipe(gulp.dest('package')); }); gulp.task('build', function() { return gulp.src('package/**') .pipe(electron({ version: '0.30.3', // build for OSX platform: 'darwin' })) .pipe(electron.zfsdest('dist/es6-ng-electron.zip')); }); gulp.task('default', function(){ return runSequence('clean', 'transpile:app', 'copy:app','build'); });
Document why `key` comes first
/* eslint-env node */ const FormData = require('form-data'); const got = require('got'); const BASE_URL = 'https://debuglogs.org'; // Workaround: Submitting `FormData` using native `FormData::submit` procedure // as integration with `got` results in S3 error saying we haven’t set the // `Content-Length` header: const submitFormData = (form, url) => new Promise((resolve, reject) => { form.submit(url, (error) => { if (error) { return reject(error); } return resolve(); }); }); // upload :: String -> Promise URL exports.upload = async (content) => { const signedForm = await got.get(BASE_URL, { json: true }); const { fields, url } = signedForm.body; const form = new FormData(); // The API expects `key` to be the first field: form.append('key', fields.key); Object.entries(fields) .filter(([key]) => key !== 'key') .forEach(([key, value]) => { form.append(key, value); }); const contentBuffer = Buffer.from(content, 'utf8'); const contentType = 'text/plain'; form.append('Content-Type', contentType); form.append('file', contentBuffer, { contentType, filename: 'signal-desktop-debug-log.txt', }); await submitFormData(form, url); return `${BASE_URL}/${fields.key}`; };
/* eslint-env node */ const FormData = require('form-data'); const got = require('got'); const BASE_URL = 'https://debuglogs.org'; // Workaround: Submitting `FormData` using native `FormData::submit` procedure // as integration with `got` results in S3 error saying we haven’t set the // `Content-Length` header: const submitFormData = (form, url) => new Promise((resolve, reject) => { form.submit(url, (error) => { if (error) { return reject(error); } return resolve(); }); }); // upload :: String -> Promise URL exports.upload = async (content) => { const signedForm = await got.get(BASE_URL, { json: true }); const { fields, url } = signedForm.body; const form = new FormData(); form.append('key', fields.key); Object.entries(fields) .filter(([key]) => key !== 'key') .forEach(([key, value]) => { form.append(key, value); }); const contentBuffer = Buffer.from(content, 'utf8'); const contentType = 'text/plain'; form.append('Content-Type', contentType); form.append('file', contentBuffer, { contentType, filename: 'signal-desktop-debug-log.txt', }); await submitFormData(form, url); return `${BASE_URL}/${fields.key}`; };
Clear reset key on user activity
from rorn.Box import LoginBox, ErrorBox, WarningBox, SuccessBox from rorn.Session import delay from User import User from Button import Button from utils import * @get('login') def login(handler, request): handler.title('Login') if handler.session['user']: print WarningBox('Logged In', 'You are already logged in as %s' % handler.session['user']) else: print LoginBox() @post('login') def loginPost(handler, request, p_username, p_password): handler.title('Login') user = User.load(username = p_username, password = User.crypt(p_username, p_password)) if user: if user.resetkey: user.resetkey = None user.save() handler.session['user'] = user delay(handler, SuccessBox("Login Complete", "Logged in as %s" % user)) redirect('/') else: delay(handler, ErrorBox("Login Failed", "Invalid username/password combination")) redirect('/') @get('logout') def logout(handler, request): print "<form method=\"post\" action=\"/logout\">" print Button('Logout', type = 'submit').negative() print "</form>" @post('logout') def logoutPost(handler, request): if handler.session['user']: del handler.session['user'] redirect('/') else: print ErrorBox("Logout Failed", "You are not logged in")
from rorn.Box import LoginBox, ErrorBox, WarningBox, SuccessBox from rorn.Session import delay from User import User from Button import Button from utils import * @get('login') def login(handler, request): handler.title('Login') if handler.session['user']: print WarningBox('Logged In', 'You are already logged in as %s' % handler.session['user']) else: print LoginBox() @post('login') def loginPost(handler, request, p_username, p_password): handler.title('Login') user = User.load(username = p_username, password = User.crypt(p_username, p_password)) if user: handler.session['user'] = user delay(handler, SuccessBox("Login Complete", "Logged in as %s" % user)) redirect('/') else: delay(handler, ErrorBox("Login Failed", "Invalid username/password combination")) redirect('/') @get('logout') def logout(handler, request): print "<form method=\"post\" action=\"/logout\">" print Button('Logout', type = 'submit').negative() print "</form>" @post('logout') def logoutPost(handler, request): if handler.session['user']: del handler.session['user'] redirect('/') else: print ErrorBox("Logout Failed", "You are not logged in")
Move helper functions inside 'interpret'
import match from './match'; import {matchOn} from './match'; // Convert a free monad representation into a JSON representation const interpret = (program) => { const start = {id: 0, nodes: []}; const process = (env, node) => matchOn('nodeName')(node)({ sourceAlpha: () => env, sourceGraphic: () => env, _: node => ({ id: env.id+1, nodes: [...env.nodes, { ...node.toJS(), result: env.id }], }) }); const result = (env, node) => matchOn('nodeName')(node)({ sourceAlpha: () => 'SourceAlpha', sourceGraphic: () => 'SourceGraphic', _: () => env.id }); return program.iterate(start, process, result).nodes; }; // Remove unused nodes from the output const prune = (nodes) => { const result = prune_once(nodes); return (result.length === nodes.length) ? result : prune(result); }; const prune_once = (nodes) => { const last = nodes[nodes.length - 1]; const is_output = node => node === last; const is_used = node => nodes .find(n => n.in === node.result || n.in2 === node.result); return nodes.filter(n => is_output(n) || is_used(n)); }; const run = program => prune(interpret(program)); export default run;
import match from './match'; import {matchOn} from './match'; // Convert a free monad representation into a JSON representation const process = (env, node) => matchOn('nodeName')(node)({ sourceAlpha: () => env, sourceGraphic: () => env, _: node => ({ id: env.id+1, nodes: [...env.nodes, { ...node.toJS(), result: env.id }], }) }); const result = (env, node) => matchOn('nodeName')(node)({ sourceAlpha: () => 'SourceAlpha', sourceGraphic: () => 'SourceGraphic', _: () => env.id }); const interpret = (program) => { const start = {id: 0, nodes: []}; return program.iterate(start, process, result).nodes; }; // Remove unused nodes from the output const prune = (nodes) => { const result = prune_once(nodes); return (result.length === nodes.length) ? result : prune(result); }; const prune_once = (nodes) => { const last = nodes[nodes.length - 1]; const is_output = node => node === last; const is_used = node => nodes .find(n => n.in === node.result || n.in2 === node.result); return nodes.filter(n => is_output(n) || is_used(n)); }; const run = program => prune(interpret(program)); export default run;
Use get_success_url to work around reverse_lazy issue on Python3.
from django.core.urlresolvers import reverse from vanilla import ListView, CreateView, DetailView, UpdateView, DeleteView from .forms import {{ cookiecutter.model_name }}Form from .models import {{ cookiecutter.model_name }} class {{ cookiecutter.model_name }}CRUDView(object): model = {{ cookiecutter.model_name }} form_class = {{ cookiecutter.model_name }}Form def get_success_url(self): return reverse('{{ cookiecutter.model_name|lower }}_list') class {{ cookiecutter.model_name }}List({{ cookiecutter.model_name }}CRUDView, ListView): pass class {{ cookiecutter.model_name }}Create({{ cookiecutter.model_name }}CRUDView, CreateView): pass class {{ cookiecutter.model_name }}Detail({{ cookiecutter.model_name }}CRUDView, DetailView): pass class {{ cookiecutter.model_name }}Update({{ cookiecutter.model_name }}CRUDView, UpdateView): pass class {{ cookiecutter.model_name }}Delete({{ cookiecutter.model_name }}CRUDView, DeleteView): pass
from django.core.urlresolvers import reverse_lazy from vanilla import ListView, CreateView, DetailView, UpdateView, DeleteView from .forms import {{ cookiecutter.model_name }}Form from .models import {{ cookiecutter.model_name }} class {{ cookiecutter.model_name }}CRUDView(object): model = {{ cookiecutter.model_name }} form_class = {{ cookiecutter.model_name }}Form success_url = reverse_lazy('{{ cookiecutter.model_name|lower }}_list') class {{ cookiecutter.model_name }}List({{ cookiecutter.model_name }}CRUDView, ListView): pass class {{ cookiecutter.model_name }}Create({{ cookiecutter.model_name }}CRUDView, CreateView): pass class {{ cookiecutter.model_name }}Detail({{ cookiecutter.model_name }}CRUDView, DetailView): pass class {{ cookiecutter.model_name }}Update({{ cookiecutter.model_name }}CRUDView, UpdateView): pass class {{ cookiecutter.model_name }}Delete({{ cookiecutter.model_name }}CRUDView, DeleteView): pass
Test that teardown methods are actually called
import uuid from handler_fixture import StationHandlerTestCase from groundstation.transfer.request_handlers import handle_fetchobject from groundstation.transfer.response_handlers import handle_terminate class TestHandlerTerminate(StationHandlerTestCase): def test_handle_terminate(self): # Write an object into the station oid = self.station.station.write("butts lol") self.station.payload = oid self.station.id = uuid.uuid1() self.assertEqual(len(self.station.station.registry.contents), 0) self.station.station.register_request(self.station) self.assertEqual(len(self.station.station.registry.contents), 1) handle_fetchobject(self.station) ret = [0] def _teardown(): ret[0] += 1 self.station.teardown = _teardown term = self.station.stream.pop() handle_terminate(term) self.assertEqual(len(self.station.station.registry.contents), 0) self.assertEqual(ret[0], 1)
import uuid from handler_fixture import StationHandlerTestCase from groundstation.transfer.request_handlers import handle_fetchobject from groundstation.transfer.response_handlers import handle_terminate class TestHandlerTerminate(StationHandlerTestCase): def test_handle_terminate(self): # Write an object into the station oid = self.station.station.write("butts lol") self.station.payload = oid self.station.id = uuid.uuid1() self.assertEqual(len(self.station.station.registry.contents), 0) self.station.station.register_request(self.station) self.assertEqual(len(self.station.station.registry.contents), 1) handle_fetchobject(self.station) term = self.station.stream.pop() handle_terminate(term) self.assertEqual(len(self.station.station.registry.contents), 0)
Remove schemes matcher from auth routes. It seems broken
package auth import ( "encoding/json" "fmt" "github.com/gorilla/mux" "net/http" ) const ( tokenCookieName = "access_token" ) func AddRoutes(r *mux.Router, service Service) { // Do not serve these routes over http. r.HandleFunc("/login", func(w http.ResponseWriter, r *http.Request) { decoder := json.NewDecoder(r.Body) req := LoginRequest{} err := decoder.Decode(&req) if err != nil { http.Error(w, fmt.Sprintf("failed to decode login request from request body: %v", err), 400) return } token, err := service.Login(req) if err != nil { // Explicitly do not pass up the reason for login failure. http.Error(w, "Invalid username or password.", 403) } signedString, err := service.Sign(token) if err != nil { http.Error(w, fmt.Sprintf("failed to issue token: %v", err), 503) } // Return token as a cookie. w.Header().Add("Set-Cookie", fmt.Sprintf("%v=%v; Path=/api; Secure; HttpOnly;", tokenCookieName, signedString)) w.WriteHeader(http.StatusNoContent) }).Methods("POST") }
package auth import ( "encoding/json" "fmt" "github.com/gorilla/mux" "net/http" ) const ( tokenCookieName = "access_token" ) func AddRoutes(r *mux.Router, service Service) { // Explicitly only serve login over https. r.HandleFunc("/login", func(w http.ResponseWriter, r *http.Request) { decoder := json.NewDecoder(r.Body) req := LoginRequest{} err := decoder.Decode(&req) if err != nil { http.Error(w, fmt.Sprintf("failed to decode login request from request body: %v", err), 400) return } token, err := service.Login(req) if err != nil { // Explicitly do not pass up the reason for login failure. http.Error(w, "Invalid username or password.", 403) } signedString, err := service.Sign(token) if err != nil { http.Error(w, fmt.Sprintf("failed to issue token: %v", err), 503) } // Return token as a cookie. w.Header().Add("Set-Cookie", fmt.Sprintf("%v=%v; Path=/api; Secure; HttpOnly;", tokenCookieName, signedString)) w.WriteHeader(http.StatusNoContent) }).Methods("POST").Schemes("https") }
Make all changes to app/ run on all trybot platforms, not just the big three. Anyone who's changing a header here may break the chromeos build. BUG=none TEST=none Review URL: http://codereview.chromium.org/2838027 git-svn-id: http://src.chromium.org/svn/trunk/src@51000 4ff67af0-8c30-449e-8e8b-ad334ec8d88c Former-commit-id: 7a0c0e6ed56e847b7b6300c1a0b4a427f26b296d
#!/usr/bin/python # Copyright (c) 2009 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Makes sure that the app/ code is cpplint clean.""" INCLUDE_CPP_FILES_ONLY = ( r'.*\.cc$', r'.*\.h$' ) EXCLUDE = ( # Autogenerated window resources files are off limits r'.*resource.h$', ) def CheckChangeOnUpload(input_api, output_api): results = [] black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDE sources = lambda x: input_api.FilterSourceFile( x, white_list=INCLUDE_CPP_FILES_ONLY, black_list=black_list) results.extend(input_api.canned_checks.CheckChangeLintsClean( input_api, output_api, sources)) return results def GetPreferredTrySlaves(): return ['win', 'linux', 'linux_view', 'linux_chromeos', 'mac']
#!/usr/bin/python # Copyright (c) 2009 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Makes sure that the app/ code is cpplint clean.""" INCLUDE_CPP_FILES_ONLY = ( r'.*\.cc$', r'.*\.h$' ) EXCLUDE = ( # Autogenerated window resources files are off limits r'.*resource.h$', ) def CheckChangeOnUpload(input_api, output_api): results = [] black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDE sources = lambda x: input_api.FilterSourceFile( x, white_list=INCLUDE_CPP_FILES_ONLY, black_list=black_list) results.extend(input_api.canned_checks.CheckChangeLintsClean( input_api, output_api, sources)) return results
Sort roles directly by name
const db = require('../lib/database'); const messageHelpers = require('../lib/message_helpers'); const Table = require('ascii-table'); const textHelpers = require('../lib/text_helpers'); module.exports = { bind: 'list_roles', handler: async function(message) { if (!message.guild) { await messageHelpers.sendError(message, 'This command must be run within a server.'); return; } const roleIds = await db.pool .query('SELECT discord_id FROM bot_roles'); const guildRoles = message.guild.roles.array(); let roleNames = []; for (let roleId of roleIds.rows) { const roleName = guildRoles .filter((el) => el.id == roleId.discord_id) .map((el) => el.name); if (roleName.length > 0) { roleNames.push(roleName[0]); } } roleNames = roleNames.sort(); const table = new Table(); for (let role of roleNames) { table.addRow(role, `${process.env.COMMAND_PREFIX}join_role ${role}`); } textHelpers.paginateMessage(message, table.toString()); return; }, help: 'List the roles that can be joined using join_role', };
const db = require('../lib/database'); const messageHelpers = require('../lib/message_helpers'); const Table = require('ascii-table'); const textHelpers = require('../lib/text_helpers'); module.exports = { bind: 'list_roles', handler: async function(message) { if (!message.guild) { await messageHelpers.sendError(message, 'This command must be run within a server.'); return; } const roleIds = await db.pool .query('SELECT discord_id FROM bot_roles ORDER BY sort_index ASC'); const guildRoles = message.guild.roles.array(); const table = new Table(); for (let roleId of roleIds.rows) { const roleNames = guildRoles .filter((el) => el.id == roleId.discord_id) .map((el) => el.name); if (roleNames.length > 0) { table.addRow(roleNames[0], `${process.env.COMMAND_PREFIX}join_role ${roleNames[0]}`); } } textHelpers.paginateMessage(message, table.toString()); return; }, help: 'List the roles that can be joined using join_role', };
Fix Texture objects in BaseTextureCache
var core = require('../core'); module.exports = function () { return function (resource, next) { // create a new texture if the data is an Image object if (resource.data && resource.isImage) { var baseTexture = new core.BaseTexture(resource.data, null, core.utils.getResolutionOfUrl(resource.url)); baseTexture.imageUrl = resource.url; resource.texture = new core.Texture(baseTexture); // lets also add the frame to pixi's global cache for fromFrame and fromImage fucntions core.utils.BaseTextureCache[resource.url] = baseTexture; core.utils.TextureCache[resource.url] = resource.texture; } next(); }; };
var core = require('../core'); module.exports = function () { return function (resource, next) { // create a new texture if the data is an Image object if (resource.data && resource.isImage) { var baseTexture = new core.BaseTexture(resource.data, null, core.utils.getResolutionOfUrl(resource.url)); baseTexture.imageUrl = resource.url; resource.texture = new core.Texture(baseTexture); // lets also add the frame to pixi's global cache for fromFrame and fromImage fucntions core.utils.BaseTextureCache[resource.url] = resource.texture; core.utils.TextureCache[resource.url] = resource.texture; } next(); }; };
Add uid and another URL property. Signed-off-by: François de Metz <5187da0b934cc25eb2201a3ec9206c24b13cb23b@stormz.me>
var icalendar = require('icalendar'); exports.generateIcal = function(boards) { var ical = new icalendar.iCalendar(); boards.each(function(board) { board.cards().each(function(card) { // no arm, no chocolate if (!card.get('badges').due) return; var event = new icalendar.VEvent(card.id); event.setSummary(card.get('name')); event.setDescription(card.get('description')); event.setDate(card.get('badges').due); event.addProperty('ATTACH', card.get('url')); event.addProperty('URL', card.get('url')); ical.addComponent(event); }); }); return ical; }
var icalendar = require('icalendar'); exports.generateIcal = function(boards) { var ical = new icalendar.iCalendar(); boards.each(function(board) { board.cards().each(function(card) { // no arm, no chocolate if (!card.get('badges').due) return; var event = new icalendar.VEvent(); event.setSummary(card.get('name')); event.setDescription(card.get('description')); event.setDate(card.get('badges').due); event.addProperty('ATTACH', card.get('url')); ical.addComponent(event); }); }); return ical; }
Test commit from another location.
<?php namespace Affinity\AppBundle\Controller; use Symfony\Bundle\FrameworkBundle\Controller\Controller; use Symfony\Component\Security\Core\SecurityContext; use Symfony\Component\HttpFoundation\Response; use Affinity\AppBundle\Entity\Employee; class EmployeeController extends Controller { public function loginFormAction() { $request = $this->getRequest(); $session = $request->getSession(); // Adding a comment for a commit! $stuff = new \Affinity\AppBundle\DependencyInjection\AffinityAppExtension; if( $request->attributes->has( SecurityContext::AUTHENTICATION_ERROR )) { $error = $request->attributes->get( SecurityContext::AUTHENTICATION_ERROR ); } else { $error = $session->get( SecurityContext::AUTHENTICATION_ERROR ); $session->remove( SecurityContext::AUTHENTICATION_ERROR ); } return $this->render( 'AffinityAppBundle:Employee:login.html.twig', array( 'last_username' => $session->get( SecurityContext::LAST_USERNAME), 'error' => $error )); } }
<?php namespace Affinity\AppBundle\Controller; use Symfony\Bundle\FrameworkBundle\Controller\Controller; use Symfony\Component\Security\Core\SecurityContext; use Symfony\Component\HttpFoundation\Response; use Affinity\AppBundle\Entity\Employee; class EmployeeController extends Controller { public function loginFormAction() { $request = $this->getRequest(); $session = $request->getSession(); $stuff = new \Affinity\AppBundle\DependencyInjection\AffinityAppExtension; if( $request->attributes->has( SecurityContext::AUTHENTICATION_ERROR )) { $error = $request->attributes->get( SecurityContext::AUTHENTICATION_ERROR ); } else { $error = $session->get( SecurityContext::AUTHENTICATION_ERROR ); $session->remove( SecurityContext::AUTHENTICATION_ERROR ); } return $this->render( 'AffinityAppBundle:Employee:login.html.twig', array( 'last_username' => $session->get( SecurityContext::LAST_USERNAME), 'error' => $error )); } }
Fix the handling of missing prefixes Twisted defaults to an empty string, while IRCBase defaults to None.
# Taken from txircd: # https://github.com/ElementalAlchemist/txircd/blob/8832098149b7c5f9b0708efe5c836c8160b0c7e6/txircd/utils.py#L9 def _enum(**enums): return type('Enum', (), enums) ModeType = _enum(LIST=0, PARAM_SET=1, PARAM_UNSET=2, NO_PARAM=3) ModuleLoadType = _enum(LOAD=0, UNLOAD=1, ENABLE=2, DISABLE=3) def isNumber(s): try: float(s) return True except ValueError: return False def parseUserPrefix(prefix): if prefix is None: prefix = "" if "!" in prefix: nick = prefix[:prefix.find("!")] ident = prefix[prefix.find("!") + 1:prefix.find("@")] host = prefix[prefix.find("@") + 1:] return nick, ident, host # Not all "users" have idents and hostnames nick = prefix return nick, None, None def networkName(bot, server): return bot.servers[server].supportHelper.network
# Taken from txircd: # https://github.com/ElementalAlchemist/txircd/blob/8832098149b7c5f9b0708efe5c836c8160b0c7e6/txircd/utils.py#L9 def _enum(**enums): return type('Enum', (), enums) ModeType = _enum(LIST=0, PARAM_SET=1, PARAM_UNSET=2, NO_PARAM=3) ModuleLoadType = _enum(LOAD=0, UNLOAD=1, ENABLE=2, DISABLE=3) def isNumber(s): try: float(s) return True except ValueError: return False def parseUserPrefix(prefix): if "!" in prefix: nick = prefix[:prefix.find("!")] ident = prefix[prefix.find("!") + 1:prefix.find("@")] host = prefix[prefix.find("@") + 1:] return nick, ident, host # Not all "users" have idents and hostnames nick = prefix return nick, None, None def networkName(bot, server): return bot.servers[server].supportHelper.network
Fix assertion for Testutils to check on sqlite://:memory: The current uses sqlite://memory to test a connection. This is a faulty path to memory. Corrent path is sqlite://:memory: Change-Id: I950521f9b9c6aa8ae73be24121a836e84e409ca2
# Copyright (c) 2014 Rackspace Hosting, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import ddt from marconi.queues.storage import utils from marconi import tests as testing @ddt.ddt class TestUtils(testing.TestBase): @testing.requires_mongodb def test_can_connect_suceeds_if_good_uri_mongo(self): self.assertTrue(utils.can_connect('mongodb://localhost:27017')) def test_can_connect_suceeds_if_good_uri_sqlite(self): self.assertTrue(utils.can_connect('sqlite://:memory:')) @ddt.data( 'mongodb://localhost:27018', # wrong port 'localhost:27017', # missing scheme 'redis://localhost:6379' # not supported with default install ) @testing.requires_mongodb def test_can_connect_fails_if_bad_uri(self, uri): self.assertFalse(utils.can_connect(uri))
# Copyright (c) 2014 Rackspace Hosting, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import ddt from marconi.queues.storage import utils from marconi import tests as testing @ddt.ddt class TestUtils(testing.TestBase): @testing.requires_mongodb def test_can_connect_suceeds_if_good_uri_mongo(self): self.assertTrue(utils.can_connect('mongodb://localhost:27017')) def test_can_connect_suceeds_if_good_uri_sqlite(self): self.assertTrue(utils.can_connect('sqlite://memory')) @ddt.data( 'mongodb://localhost:27018', # wrong port 'localhost:27017', # missing scheme 'redis://localhost:6379' # not supported with default install ) @testing.requires_mongodb def test_can_connect_fails_if_bad_uri(self, uri): self.assertFalse(utils.can_connect(uri))
Implement simple logging to console.
/** * Copyright 2012 The PlayN Authors * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package playn.ios; import cli.System.Console; import playn.core.Log; class IOSLog implements Log { // TODO: stack traces @Override public void debug(String msg) { Console.WriteLine("DEBUG: " + msg); } @Override public void debug(String msg, Throwable e) { debug(msg + ": " + e.getMessage()); } @Override public void info(String msg) { Console.WriteLine(msg); } @Override public void info(String msg, Throwable e) { info(msg + ": " + e.getMessage()); } @Override public void warn(String msg) { Console.WriteLine("WARN: " + msg); } @Override public void warn(String msg, Throwable e) { warn(msg + ": " + e.getMessage()); } @Override public void error(String msg) { Console.WriteLine("ERROR: " + msg); } @Override public void error(String msg, Throwable e) { error(msg + ": " + e.getMessage()); } }
/** * Copyright 2012 The PlayN Authors * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package playn.ios; import playn.core.Log; class IOSLog implements Log { @Override public void error(String msg, Throwable e) { throw new RuntimeException("TODO"); } @Override public void error(String msg) { throw new RuntimeException("TODO"); } @Override public void info(String msg) { throw new RuntimeException("TODO"); } @Override public void info(String msg, Throwable e) { throw new RuntimeException("TODO"); } @Override public void debug(String msg) { throw new RuntimeException("TODO"); } @Override public void debug(String msg, Throwable e) { throw new RuntimeException("TODO"); } @Override public void warn(String msg) { throw new RuntimeException("TODO"); } @Override public void warn(String msg, Throwable e) { throw new RuntimeException("TODO"); } }
Fix the session variable setting.
package com.ticktac.utils; import java.io.IOException; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import com.ticktac.business.User; import com.ticktac.data.UserDAO; public class LogInRequestHandler implements RequestHandler { UserDAO userDAO; public LogInRequestHandler() { userDAO = new UserDAO(); } public String handleRequest(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { String view = ""; String email = (String)request.getParameter("email"); String password = (String)request.getParameter("password"); if(email == null || password == null) view = "login.jsp"; else { User userBean = userDAO.searchUser(email, password); if(userBean == null) { request.getServletContext().setAttribute("accountExists", false); view = "login.jsp"; } else { request.setAttribute("accountExists", true); request.getSession().setAttribute("user", userBean.getName()); request.setAttribute("userBean", userBean); view = "welcome.jsp"; } } return view; } }
package com.ticktac.utils; import java.io.IOException; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import com.ticktac.business.User; import com.ticktac.data.UserDAO; public class LogInRequestHandler implements RequestHandler { UserDAO userDAO; public LogInRequestHandler() { userDAO = new UserDAO(); } public String handleRequest(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { String view = ""; String email = (String)request.getParameter("email"); String password = (String)request.getParameter("password"); if(email == null || password == null) view = "login.jsp"; else { User userBean = userDAO.searchUser(email, password); if(userBean == null) { request.getServletContext().setAttribute("accountExists", false); view = "login.jsp"; } else { request.setAttribute("accountExists", true); request.getSession().setAttribute("user", request.getParameter(userBean.getName())); request.setAttribute("userBean", userBean); view = "welcome.jsp"; } } return view; } }
Add missing done() on success
/** * grunt-dco * * Copyright (c) 2014 Rafael Xavier de Souza * Licensed under the MIT license. */ "use strict"; module.exports = function(grunt) { var dco = require("dco"); grunt.registerMultiTask("dco", "Validate DCO signatures in git repositories.", function() { var done = this.async(); // Merge task-specific and/or target-specific options with these defaults. var options = this.options({ path: "." }); dco.getCommitErrors(options, function(error, errors) { if (error) { grunt.log.error("Internal error " + error.stack); return done(error); } if (errors.length) { errors.forEach(function( error ) { grunt.log.error( error ); }); return done(new Error("Invalid commits.")); } grunt.log.ok("All commits have appropriate licensing."); done(); }); }); };
/** * grunt-dco * * Copyright (c) 2014 Rafael Xavier de Souza * Licensed under the MIT license. */ "use strict"; module.exports = function(grunt) { var dco = require("dco"); grunt.registerMultiTask("dco", "Validate DCO signatures in git repositories.", function() { var done = this.async(); // Merge task-specific and/or target-specific options with these defaults. var options = this.options({ path: "." }); dco.getCommitErrors(options, function(error, errors) { if (error) { grunt.log.error("Internal error " + error.stack); return done(error); } if (errors.length) { errors.forEach(function( error ) { grunt.log.error( error ); }); return done(new Error("Invalid commits.")); } grunt.log.ok("All commits have appropriate licensing."); }); }); };
Change name of a function for a more explicit
<?php namespace Cnerta\BehindAProxyBundle\Listener; use Symfony\Component\HttpKernel\HttpKernelInterface; use Symfony\Component\HttpKernel\KernelEvents; use Symfony\Component\HttpKernel\Event\GetResponseEvent; use Symfony\Component\EventDispatcher\EventSubscriberInterface; use Cnerta\BehindAProxyBundle\Services\ProxyService; /** * */ class ProxyListener implements EventSubscriberInterface { /** * @var Cnerta\BehindAProxyBundle\Services\ProxyService */ protected $proxyService; function __construct(ProxyService $proxyService) { $this->proxyService = $proxyService; } /** * Handles security. * * @param GetResponseEvent $event An GetResponseEvent instance */ public function onKernelRequest(GetResponseEvent $event) { if (HttpKernelInterface::MASTER_REQUEST == $event->getRequestType()) { $this->proxyService->loadDefaultStreamContext(); } } public static function getSubscribedEvents() { return array(KernelEvents::REQUEST => array('onKernelRequest', 8)); } }
<?php namespace Cnerta\BehindAProxyBundle\Listener; use Symfony\Component\HttpKernel\HttpKernelInterface; use Symfony\Component\HttpKernel\KernelEvents; use Symfony\Component\HttpKernel\Event\GetResponseEvent; use Symfony\Component\EventDispatcher\EventSubscriberInterface; use Cnerta\BehindAProxyBundle\Services\ProxyService; /** * */ class ProxyListener implements EventSubscriberInterface { /** * @var Cnerta\BehindAProxyBundle\Services\ProxyService */ protected $proxyService; function __construct(ProxyService $proxyService) { $this->proxyService = $proxyService; } /** * Handles security. * * @param GetResponseEvent $event An GetResponseEvent instance */ public function onKernelRequest(GetResponseEvent $event) { if (HttpKernelInterface::MASTER_REQUEST == $event->getRequestType()) { $this->proxyService->streamContextSetDefault(); } } public static function getSubscribedEvents() { return array(KernelEvents::REQUEST => array('onKernelRequest', 8)); } }
Fix a block condition where cb is called immediately
package core import ( "fmt" ctypes "github.com/tendermint/tendermint/rpc/core/types" "github.com/tendermint/tendermint/types" tmsp "github.com/tendermint/tmsp/types" ) //----------------------------------------------------------------------------- // NOTE: tx must be signed func BroadcastTxAsync(tx types.Tx) (*ctypes.ResultBroadcastTx, error) { err := mempoolReactor.BroadcastTx(tx, nil) if err != nil { return nil, fmt.Errorf("Error broadcasting transaction: %v", err) } return &ctypes.ResultBroadcastTx{}, nil } // Note: tx must be signed func BroadcastTxSync(tx types.Tx) (*ctypes.ResultBroadcastTx, error) { resCh := make(chan *tmsp.Response, 1) err := mempoolReactor.BroadcastTx(tx, func(res *tmsp.Response) { resCh <- res }) if err != nil { return nil, fmt.Errorf("Error broadcasting transaction: %v", err) } res := <-resCh return &ctypes.ResultBroadcastTx{ Code: res.Code, Data: res.Data, Log: res.Log, }, nil } func UnconfirmedTxs() (*ctypes.ResultUnconfirmedTxs, error) { txs, err := mempoolReactor.Mempool.Reap() return &ctypes.ResultUnconfirmedTxs{len(txs), txs}, err }
package core import ( "fmt" ctypes "github.com/tendermint/tendermint/rpc/core/types" "github.com/tendermint/tendermint/types" tmsp "github.com/tendermint/tmsp/types" ) //----------------------------------------------------------------------------- // NOTE: tx must be signed func BroadcastTxAsync(tx types.Tx) (*ctypes.ResultBroadcastTx, error) { err := mempoolReactor.BroadcastTx(tx, nil) if err != nil { return nil, fmt.Errorf("Error broadcasting transaction: %v", err) } return &ctypes.ResultBroadcastTx{}, nil } // Note: tx must be signed func BroadcastTxSync(tx types.Tx) (*ctypes.ResultBroadcastTx, error) { resCh := make(chan *tmsp.Response) err := mempoolReactor.BroadcastTx(tx, func(res *tmsp.Response) { resCh <- res }) if err != nil { return nil, fmt.Errorf("Error broadcasting transaction: %v", err) } res := <-resCh return &ctypes.ResultBroadcastTx{ Code: res.Code, Data: res.Data, Log: res.Log, }, nil } func UnconfirmedTxs() (*ctypes.ResultUnconfirmedTxs, error) { txs, err := mempoolReactor.Mempool.Reap() return &ctypes.ResultUnconfirmedTxs{len(txs), txs}, err }
Add comment and alternative way to generate 401 response
<?php namespace Tyrell; use Tonic\Resource, Tonic\Response, Tonic\UnauthorizedException; /** * Simple HTTP authentication example. * * The condition annotation @secure maps to the secure() method allowing us to easily * secure the mySecret() method with the given username and password * * @uri /secret */ class Secret extends Resource { /** * @method GET * @secure aUser aPassword */ function mySecret() { return 'My secret'; } function secure($username, $password) { if ( isset($_SERVER['PHP_AUTH_USER']) && $_SERVER['PHP_AUTH_USER'] == $username && isset($_SERVER['PHP_AUTH_PW']) && $_SERVER['PHP_AUTH_PW'] == $password ) { return; } #return new Response(401, 'No entry', array('wwwAuthenticate' => 'Basic realm="My Realm"')); throw new UnauthorizedException('No entry'); } }
<?php namespace Tyrell; use Tonic\Resource, Tonic\Response, Tonic\UnauthorizedException; /** * @uri /secret */ class Secret extends Resource { /** * @method GET * @secure aUser aPassword */ function mySecret() { return 'My secret'; } function secure($username, $password) { if ( isset($_SERVER['PHP_AUTH_USER']) && $_SERVER['PHP_AUTH_USER'] == $username && isset($_SERVER['PHP_AUTH_PW']) && $_SERVER['PHP_AUTH_PW'] == $password ) { return; } throw new UnauthorizedException; } }
test(lorax): Fix tests due to eslint problems
'use strict'; import test from 'ava'; import * as lorax from '../index'; import * as fs from 'fs'; test.afterEach.cb(t => { fs.stat('test.md', (err, stats) => { if (stats) { fs.unlink('test.md', t.end); } else { t.end(); } }); }); test('get logs', t => { return lorax.get("^fix|^feature|^refactor|BREAKING") .then((log) => { t.ok(log); }); }); test('get logs since a certain tag', t => { const grepString = '^fix|^feature|^refactor|BREAKING' const grepRegex = new RegExp(grepString); return lorax.get(grepString, 'v0.1.3') .then((data) => { t.plan(data.length - 1); data.forEach((commit) => { if (!commit) return; const lines = commit.split('\n'); t.ok(grepRegex.test(lines[1])); }); }); }); test.cb('should write to file', t => { lorax.generate('vtest', 'test.md', {since: 'v0.1.3'}) .then(() => { fs.readFile('test.md', t.end); }); });
'use strict'; import test from 'ava'; import * as lorax from '../index'; import * as fs from 'fs'; test.afterEach.cb(t => { fs.stat('test.md', (err, stats) => { if (stats) { fs.unlink('test.md', t.end); } else { t.end(); } }); }); test('get logs', async t => { const log = lorax.get("^fix|^feature|^refactor|BREAKING"); t.ok(await log); }); test('get logs since a certain tag', async t => { const grepString = '^fix|^feature|^refactor|BREAKING' const grepRegex = new RegExp(grepString); const log = lorax.get(grepString, 'v0.1.3'); const data = await log; t.plan(data.length - 1); data.forEach((commit) => { if (!commit) return; const lines = commit.split('\n'); t.ok(grepRegex.test(lines[1])); }); }); test.cb('should write to file', t => { lorax.generate('vtest', 'test.md', {since: 'v0.1.3'}) .then(() => { fs.readFile('test.md', t.end); }); });
Update Hungarian translation for new strings Updated Hungarian translation.
define(function () { // Hungarian return { errorLoading: function () { return 'Az eredmények betöltése nem sikerült.'; }, inputTooLong: function (args) { var overChars = args.input.length - args.maximum; return 'Túl hosszú. ' + overChars + ' karakterrel több, mint kellene.'; }, inputTooShort: function (args) { var remainingChars = args.minimum - args.input.length; return 'Túl rövid. Még ' + remainingChars + ' karakter hiányzik.'; }, loadingMore: function () { return 'Töltés…'; }, maximumSelected: function (args) { return 'Csak ' + args.maximum + ' elemet lehet kiválasztani.'; }, noResults: function () { return 'Nincs találat.'; }, searching: function () { return 'Keresés…'; }, removeAllItems: function () { return 'Távolítson el minden elemet'; }, removeItem: function () { return 'Elem eltávolítása'; }, search: function() { return 'Keresés'; } }; });
define(function () { // Hungarian return { errorLoading: function () { return 'Az eredmények betöltése nem sikerült.'; }, inputTooLong: function (args) { var overChars = args.input.length - args.maximum; return 'Túl hosszú. ' + overChars + ' karakterrel több, mint kellene.'; }, inputTooShort: function (args) { var remainingChars = args.minimum - args.input.length; return 'Túl rövid. Még ' + remainingChars + ' karakter hiányzik.'; }, loadingMore: function () { return 'Töltés…'; }, maximumSelected: function (args) { return 'Csak ' + args.maximum + ' elemet lehet kiválasztani.'; }, noResults: function () { return 'Nincs találat.'; }, searching: function () { return 'Keresés…'; }, removeAllItems: function () { return 'Távolítson el minden elemet'; } }; });
Add static `rename` method to facade docblocks This method is statically accessible from the schema facade. But IDE's are not recognizing it.
<?php namespace Illuminate\Support\Facades; /** * @method static \Illuminate\Database\Schema\Builder create(string $table, \Closure $callback) * @method static \Illuminate\Database\Schema\Builder drop(string $table) * @method static \Illuminate\Database\Schema\Builder dropIfExists(string $table) * @method static \Illuminate\Database\Schema\Builder table(string $table, \Closure $callback) * @method static \Illuminate\Database\Schema\Builder rename(string $from, string $to) * @method static void defaultStringLength(int $length) * @method static \Illuminate\Database\Schema\Builder disableForeignKeyConstraints() * @method static \Illuminate\Database\Schema\Builder enableForeignKeyConstraints() * * @see \Illuminate\Database\Schema\Builder */ class Schema extends Facade { /** * Get a schema builder instance for a connection. * * @param string $name * @return \Illuminate\Database\Schema\Builder */ public static function connection($name) { return static::$app['db']->connection($name)->getSchemaBuilder(); } /** * Get a schema builder instance for the default connection. * * @return \Illuminate\Database\Schema\Builder */ protected static function getFacadeAccessor() { return static::$app['db']->connection()->getSchemaBuilder(); } }
<?php namespace Illuminate\Support\Facades; /** * @method static \Illuminate\Database\Schema\Builder create(string $table, \Closure $callback) * @method static \Illuminate\Database\Schema\Builder drop(string $table) * @method static \Illuminate\Database\Schema\Builder dropIfExists(string $table) * @method static \Illuminate\Database\Schema\Builder table(string $table, \Closure $callback) * @method static void defaultStringLength(int $length) * @method static \Illuminate\Database\Schema\Builder disableForeignKeyConstraints() * @method static \Illuminate\Database\Schema\Builder enableForeignKeyConstraints() * * @see \Illuminate\Database\Schema\Builder */ class Schema extends Facade { /** * Get a schema builder instance for a connection. * * @param string $name * @return \Illuminate\Database\Schema\Builder */ public static function connection($name) { return static::$app['db']->connection($name)->getSchemaBuilder(); } /** * Get a schema builder instance for the default connection. * * @return \Illuminate\Database\Schema\Builder */ protected static function getFacadeAccessor() { return static::$app['db']->connection()->getSchemaBuilder(); } }
Rename of class, adjust scope
<?php /** * update * * Monitor statistics detailing how web servers are currently performing and store those * statistics in a database. * * PHP Thread References * * MULTITHREADING IN PHP: DOING IT RIGHT! * - http://masnun.com/2013/12/15/multithreading-in-php-doing-it-right.html */ date_default_timezone_set('America/New_York'); // Define application enviroment require_once __DIR__ . '/config/config.mode.inc'; // Based on application mode, load approprate configuration settings $mode = getenv("APP_MODE"); require_once __DIR__ . '/config/config.' . $mode . '.inc'; // Load up the Composer autoload magic require_once __DIR__ . '/vendor/autoload.php'; use UberSmith\ServerStatus\ServerUtil; use UberSmith\ServerStatus\RequestStatusAsync; try { // Load target sites from database $serverUtil = new ServerUtil(); $targetServers = $serverUtil->gatherServers(); try { $requestStatus = new RequestStatusAsync($targetServers); $results = $requestStatus->sendRequests(); } catch(Exception $e) { echo '- Status Check Exception: ' . $e->getMessage(); } // Write results to database } catch(Exception $e) { echo '- Pre or Post flight Exception: ' . $e->getMessage(); }
<?php /** * update * * Monitor statistics detailing how web servers are currently performing and store those * statistics in a database. * * PHP Thread References * * MULTITHREADING IN PHP: DOING IT RIGHT! * - http://masnun.com/2013/12/15/multithreading-in-php-doing-it-right.html */ date_default_timezone_set('America/New_York'); // Define application enviroment require_once __DIR__ . '/config/config.mode.inc'; // Based on application mode, load approprate configuration settings $mode = getenv("APP_MODE"); require_once __DIR__ . '/config/config.' . $mode . '.inc'; // Load up the Composer autoload magic require_once __DIR__ . '/vendor/autoload.php'; use UberSmith\ServerStatus\ServerUtil; use UberSmith\ServerStatus\StatusRequestThread; try { // Load target sites from database $serverUtil = new ServerUtil(); $targetServers = $serverUtil->gatherServers(); try { $requestStatus = new RequestStatusAsync($targetServers); $results = $requestStatus->sendRequests(); } catch(Exception $e) { echo '- Status Check Exception: ' . $e->getMessage(); } // Write results to database } catch(Exception $e) { echo '- Pre or Post flight Exception: ' . $e->getMessage(); }
Use Werkzeug's safe_str_cmp() instead of hmac.compare_digest() Werkzeug will use the latter on Python >2.7.7, and provides a fallback for older Python versions.
import bcrypt from sqlalchemy.schema import Column from sqlalchemy.types import Integer, String from sqlalchemy.ext.declarative import declarative_base from werkzeug.security import safe_str_cmp __all__ = ['Base', 'Account'] Base = declarative_base() MAX_STR_LEN = 256 class Account(Base): __tablename__ = 'accounts' id = Column(Integer, primary_key=True) username = Column(String(MAX_STR_LEN), unique=True) password_hash = Column(String(MAX_STR_LEN)) def set_password(self, password): if isinstance(password, unicode): password = password.encode('utf-8') self.password_hash = bcrypt.hashpw(password, bcrypt.gensalt()) def is_password_valid(self, password): if isinstance(password, unicode): password = password.encode('utf-8') hashed = self.password_hash.encode('utf-8') return safe_str_cmp(bcrypt.hashpw(password, hashed), hashed)
import hmac import bcrypt from sqlalchemy.schema import Column from sqlalchemy.types import Integer, String from sqlalchemy.ext.declarative import declarative_base __all__ = ['Base', 'Account'] Base = declarative_base() MAX_STR_LEN = 256 class Account(Base): __tablename__ = 'accounts' id = Column(Integer, primary_key=True) username = Column(String(MAX_STR_LEN), unique=True) password_hash = Column(String(MAX_STR_LEN)) def set_password(self, password): if isinstance(password, unicode): password = password.encode('utf-8') self.password_hash = bcrypt.hashpw(password, bcrypt.gensalt()) def is_password_valid(self, password): if isinstance(password, unicode): password = password.encode('utf-8') hashed = self.password_hash.encode('utf-8') return hmac.compare_digest(bcrypt.hashpw(password, hashed), hashed)
Make block comment a JavaDoc comment git-svn-id: ef215b97ec449bc9c69e2ae1448853f14b3d8f41@1651134 13f79535-47bb-0310-9956-ffa450edef68
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.imaging.formats.jpeg.iptc; /** * Represents an IPTC block, a set of key-value pairs of Photoshop IPTC data. */ public class IptcBlock { public final int blockType; // Only currently used by classes in the package final byte[] blockNameBytes; // TODO make private and provide getter? final byte[] blockData; // TODO make private and provide getter? public IptcBlock(final int blockType, final byte[] blockNameBytes, final byte[] blockData) { this.blockData = blockData; this.blockNameBytes = blockNameBytes; this.blockType = blockType; } public boolean isIPTCBlock() { return blockType == IptcConstants.IMAGE_RESOURCE_BLOCK_IPTC_DATA; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.imaging.formats.jpeg.iptc; /* * Represents an IPTC block, a set of key-value pairs of Photoshop IPTC data. */ public class IptcBlock { public final int blockType; // Only currently used by classes in the package final byte[] blockNameBytes; // TODO make private and provide getter? final byte[] blockData; // TODO make private and provide getter? public IptcBlock(final int blockType, final byte[] blockNameBytes, final byte[] blockData) { this.blockData = blockData; this.blockNameBytes = blockNameBytes; this.blockType = blockType; } public boolean isIPTCBlock() { return blockType == IptcConstants.IMAGE_RESOURCE_BLOCK_IPTC_DATA; } }
Drop TODO for getVmVersion method Review URL: https://codereview.chromium.org/12324002
// Copyright (c) 2009 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package org.chromium.sdk; import java.io.IOException; import org.chromium.sdk.util.MethodIsBlockingException; /** * Abstraction of a remote JavaScript virtual machine which is embedded into * some application and accessed via TCP/IP connection to a port opened by * DebuggerAgent. Clients can use it to conduct debugging process. */ public interface StandaloneVm extends JavascriptVm { /** * Connects to the target VM. * * @param listener to report the debug events to * @throws IOException if there was a transport layer error * @throws UnsupportedVersionException if the SDK protocol version is not * compatible with that supported by the browser * @throws MethodIsBlockingException because initialization implies couple of remote calls * (to request version etc) */ void attach(DebugEventListener listener) throws IOException, UnsupportedVersionException, MethodIsBlockingException; /** * @return name of embedding application as it wished to name itself; might be null */ String getEmbedderName(); /** * This version should correspond to {@link JavascriptVm#getVersion()}. However it gets available * earlier, at the transport handshake stage. * @return version of V8 implementation, format is unspecified; must not be null if * {@link StandaloneVm} has been attached */ String getVmVersion(); /** * @return message explaining why VM is detached; may be null */ String getDisconnectReason(); }
// Copyright (c) 2009 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package org.chromium.sdk; import java.io.IOException; import org.chromium.sdk.util.MethodIsBlockingException; /** * Abstraction of a remote JavaScript virtual machine which is embedded into * some application and accessed via TCP/IP connection to a port opened by * DebuggerAgent. Clients can use it to conduct debugging process. */ public interface StandaloneVm extends JavascriptVm { /** * Connects to the target VM. * * @param listener to report the debug events to * @throws IOException if there was a transport layer error * @throws UnsupportedVersionException if the SDK protocol version is not * compatible with that supported by the browser * @throws MethodIsBlockingException because initialization implies couple of remote calls * (to request version etc) */ void attach(DebugEventListener listener) throws IOException, UnsupportedVersionException, MethodIsBlockingException; /** * @return name of embedding application as it wished to name itself; might be null */ String getEmbedderName(); /** * @return version of V8 implementation, format is unspecified; must not be null if * {@link StandaloneVm} has been attached */ // TODO: align this with {@link JavascriptVm#getVersion()} method. String getVmVersion(); /** * @return message explaining why VM is detached; may be null */ String getDisconnectReason(); }
Test with the default test runner for all Django versions.
#!/usr/bin/env python import os import sys from django.conf import settings if not settings.configured: settings.configure( DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:', } }, INSTALLED_APPS=( 'selectable', ), SITE_ID=1, SECRET_KEY='super-secret', ROOT_URLCONF='selectable.tests.urls', ) from django.test.utils import get_runner def runtests(): TestRunner = get_runner(settings) test_runner = TestRunner(verbosity=1, interactive=True, failfast=False) args = sys.argv[1:] or ['selectable', ] failures = test_runner.run_tests(args) sys.exit(failures) if __name__ == '__main__': runtests()
#!/usr/bin/env python import os import sys from django.conf import settings if not settings.configured: settings.configure( DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:', } }, INSTALLED_APPS=( 'selectable', ), SITE_ID=1, SECRET_KEY='super-secret', ROOT_URLCONF='selectable.tests.urls', TEST_RUNNER='django.test.simple.DjangoTestSuiteRunner', ) from django.test.utils import get_runner def runtests(): TestRunner = get_runner(settings) test_runner = TestRunner(verbosity=1, interactive=True, failfast=False) args = sys.argv[1:] or ['selectable', ] failures = test_runner.run_tests(args) sys.exit(failures) if __name__ == '__main__': runtests()
Add timestamps to management task output
# This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. from datetime import datetime from django.core.management.base import BaseCommand from oneanddone.tasks.models import Task, TaskAttempt class Command(BaseCommand): help = 'Cleans up status of tasks and attempts based on task data' def handle(self, *args, **options): invalidated = Task.invalidate_tasks() self.stdout.write('%s: %s tasks were invalidated via bug data\n' % (datetime.now().isoformat(), invalidated)) closed = TaskAttempt.close_stale_onetime_attempts() self.stdout.write('%s: %s stale one-time attempts were closed\n' % (datetime.now().isoformat(), closed)) closed = TaskAttempt.close_expired_task_attempts() self.stdout.write('%s: %s attempts for expired tasks were closed\n' % (datetime.now().isoformat(), closed))
# This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. from django.core.management.base import BaseCommand from oneanddone.tasks.models import Task, TaskAttempt class Command(BaseCommand): help = 'Cleans up status of tasks and attempts based on task data' def handle(self, *args, **options): invalidated = Task.invalidate_tasks() self.stdout.write('%s tasks were invalidated via bug data\n' % invalidated) closed = TaskAttempt.close_stale_onetime_attempts() self.stdout.write('%s stale one-time attempts were closed\n' % closed) closed = TaskAttempt.close_expired_task_attempts() self.stdout.write('%s attempts for expired tasks were closed\n' % closed)
Sort qrc input file list so that yubikey-manager-qt packages build in a reproducible way in spite of indeterministic filesystem readdir order See https://reproducible-builds.org/ for why this is good.
#!/usr/bin/env python import os import sys import json def read_conf(fname): if not os.path.isfile(fname): return {} with open(fname, 'r') as conf: return json.load(conf) def build_qrc(resources): yield '<RCC>' yield '<qresource>' for d in resources: for root, dirs, files in os.walk(d): dirs.sort() files.sort() for f in files: yield '<file>{}</file>'.format(os.path.join(root, f)) yield '</qresource>' yield '</RCC>' def build_resources(resources, target): with open(target, 'w') as f: for line in build_qrc(resources): f.write(line + os.linesep) def build(source): conf = read_conf(source) target = os.path.basename(source) if '.' in target: target = target.rsplit('.', 1)[0] target += '.qrc' build_resources(conf.get('resources', []), target) if __name__ == '__main__': build(sys.argv[1] if len(sys.argv) >= 1 else 'resources.json')
#!/usr/bin/env python import os import sys import json def read_conf(fname): if not os.path.isfile(fname): return {} with open(fname, 'r') as conf: return json.load(conf) def build_qrc(resources): yield '<RCC>' yield '<qresource>' for d in resources: for root, dirs, files in os.walk(d): for f in files: yield '<file>{}</file>'.format(os.path.join(root, f)) yield '</qresource>' yield '</RCC>' def build_resources(resources, target): with open(target, 'w') as f: for line in build_qrc(resources): f.write(line + os.linesep) def build(source): conf = read_conf(source) target = os.path.basename(source) if '.' in target: target = target.rsplit('.', 1)[0] target += '.qrc' build_resources(conf.get('resources', []), target) if __name__ == '__main__': build(sys.argv[1] if len(sys.argv) >= 1 else 'resources.json')
Remove db if it already exists
import sqlite3 import os import pandas as pd TABLES = [['Natures', 'nature'], ['Experience'], ] PATH = os.path.dirname(__file__)+"/" try: # Little Bobby Tables os.remove(PATH + 'serpyrior.db') except FileNotFoundError: pass CONNECTION = sqlite3.connect(PATH + 'serpyrior.db') for table in TABLES: table_name = table[0] print(table_name) try: table_index = table[1] write_index = False except IndexError: table_index = None write_index = True df = pd.read_csv(PATH + table_name + '.csv') df.to_sql(table_name, CONNECTION, index=write_index, index_label=table_index) CONNECTION.commit() CONNECTION.close()
import sqlite3 import os import pandas as pd TABLES = [['Natures', 'nature'], ['Experience'], ] PATH = os.path.dirname(__file__)+"/" CONNECTION = sqlite3.connect(PATH + 'serpyrior.db') # insert a little jimmy drop tables here for table in TABLES: table_name = table[0] print(table_name) try: table_index = table[1] write_index = False except IndexError: table_index = None write_index = True df = pd.read_csv(PATH + table_name + '.csv') df.to_sql(table_name, CONNECTION, index=write_index, index_label=table_index) CONNECTION.commit() CONNECTION.close() # cur = conn.cursor() # cur.execute("CREATE TABLE IF NOT EXISTS natures()") # filename.encode('utf-8') # with open(filename) as f: # reader = csv.reader(f) # for field in reader: # cur.execute("INSERT INTO natures VALUES (?,?,?,?,?,?,?);", field) # # conn.commit() # # df = pd.read_sql_query("SELECT * FROM natures", conn, index_col='nature') # # print(df.head(25)) # conn.close()
Add gulp release script to generate minified build.
var gulp = require('gulp') var source = require('vinyl-source-stream') var serve = require('gulp-serve') var rename = require('gulp-rename') var browserify = require('browserify') var buffer = require('vinyl-buffer') var uglify = require('gulp-uglify') var taskListing = require('gulp-task-listing') // Static file server gulp.task('server', serve({ root: ['example', 'dist'], port: 7000 })) gulp.task('release', function () { return browserify('./src/opbeat.js', { standalone: 'Opbeat' }).bundle() .pipe(source('opbeat.js')) .pipe(gulp.dest('./dist')) .pipe(rename('opbeat.min.js')) .pipe(buffer()) .pipe(uglify()) .pipe(gulp.dest('./dist')) }) gulp.task('build', function () { return browserify('./src/opbeat.js', { standalone: 'Opbeat' }).bundle() .pipe(source('opbeat.js')) .pipe(gulp.dest('./dist')) }) // Development mode gulp.task('watch', [], function (cb) { gulp.run( 'build', 'server' ) // Watch JS files gulp.watch(['libs/**', 'src/**'], ['build']) console.log('\nExample site running on http://localhost:7000/\n') }) gulp.task('default', taskListing)
var gulp = require('gulp') var browserify = require('browserify') var source = require('vinyl-source-stream') var serve = require('gulp-serve') var concat = require('gulp-concat') // Static file server gulp.task('server', serve({ root: ['example', 'dist'], port: 7000 })) gulp.task('build', function () { return browserify('./src/opbeat.js', { standalone: 'Opbeat' }).bundle() .pipe(source('opbeat.js')) .pipe(gulp.dest('./dist')) }) // Development mode gulp.task('watch', [], function (cb) { gulp.run( 'build', 'server' ) // Watch JS files gulp.watch(['libs/**', 'src/**'], ['build']) console.log('\nExample site running on http://localhost:7000/\n') }) // // Default task // gulp.task('default', function () { var response = ['', 'No task selected.', 'Available tasks:', '', 'gulp watch - Watch files and preview example site on localhost.', '' ].join('\n') console.log(response) })
Update the DB-inserting script for wifi.
#!/usr/bin/env python """Temperature into database""" import glob from time import sleep import urllib2 import urllib base_dir = '/sys/bus/w1/devices/' device_folder = glob.glob(base_dir + '28*')[0] device_file = device_folder + '/w1_slave' try: while True: lines = open(device_file, 'r').readlines() string = lines[1][-6:].replace('=', '') t = int(string) temp_c = t / 1000.0 temp_f = temp_c * 9.0 / 5.0 + 32.0 data = {} data['temperature'] = str(temp_f) data['room'] = '1' url_values = urllib.urlencode(data) url = 'http://192.168.1.4/addtemperature' full_url = url + '?' + url_values data = urllib2.urlopen(full_url) print data.read() sleep(60) except KeyboardInterrupt: pass
#!/usr/bin/env python """Temperature into database""" import glob from time import sleep import urllib2 import urllib base_dir = '/sys/bus/w1/devices/' device_folder = glob.glob(base_dir + '28*')[0] device_file = device_folder + '/w1_slave' try: while True: lines = open(device_file, 'r').readlines() string = lines[1][-6:].replace('=', '') t = int(string) temp_c = t / 1000.0 temp_f = temp_c * 9.0 / 5.0 + 32.0 data = {} data['temperature'] = str(temp_f) data['room'] = '1' url_values = urllib.urlencode(data) url = 'http://192.168.1.6/addtemperature' full_url = url + '?' + url_values data = urllib2.urlopen(full_url) print data.read() sleep(4) except KeyboardInterrupt: pass
Use ArgumentParser to read paramaters
#!/usr/bin/env python import os import sys import socket from os import path from pkg_resources import * import argparse parser = argparse.ArgumentParser(description='Runs the ogcserver as WMS server') parser.add_argument('mapfile', type=str, help=''' A XML mapnik stylesheet ''') args = parser.parse_args() sys.path.insert(0,os.path.abspath('.')) from ogcserver.wsgi import WSGIApp import ogcserver default_conf = resource_filename(ogcserver.__name__, 'default.conf') application = WSGIApp(default_conf,args.mapfile) if __name__ == '__main__': from wsgiref.simple_server import make_server #if os.uname()[0] == 'Darwin': # host = socket.getfqdn() # yourname.local #else: # host = '0.0.0.0' host = '0.0.0.0' port = 8000 httpd = make_server(host, port, application) print "Listening at %s:%s...." % (host,port) httpd.serve_forever()
#!/usr/bin/env python import os import sys import socket from os import path from pkg_resources import * if not len(sys.argv) > 1: sys.exit('Usage: %s <map.xml>' % os.path.basename(sys.argv[0])) sys.path.insert(0,os.path.abspath('.')) from ogcserver.wsgi import WSGIApp import ogcserver default_conf = resource_filename(ogcserver.__name__, 'default.conf') application = WSGIApp(default_conf,mapfile=sys.argv[1]) if __name__ == '__main__': from wsgiref.simple_server import make_server #if os.uname()[0] == 'Darwin': # host = socket.getfqdn() # yourname.local #else: # host = '0.0.0.0' host = '0.0.0.0' port = 8000 httpd = make_server(host, port, application) print "Listening at %s:%s...." % (host,port) httpd.serve_forever()
Move standard-minifiers self-test timeout *after* slow run.match.
import selftest, {Sandbox} from '../tool-testing/selftest.js'; selftest.define('standard-minifiers - CSS splitting', function (options) { const s = new Sandbox({ clients: options.clients }); s.createApp('myapp', 'minification-css-splitting'); s.cd('myapp'); s.testWithAllClients(function (run) { run.waitSecs(5); run.match('myapp'); run.match('proxy'); run.match('MongoDB'); run.match('your app'); run.match('running at'); run.match('localhost'); run.connectClient(); run.waitSecs(20); run.match('client connected'); run.match('the number of stylesheets: <2>'); run.match('the color of the tested 4097th property: <rgb(0, 128, 0)>'); s.append('client/lots-of-styles.main.styl', ` .class-4097 color: blue `); run.match('Client modified -- refreshing'); run.waitSecs(90); run.match('the number of stylesheets: <2>'); run.match('the color of the tested 4097th property: <rgb(0, 0, 255)>'); run.stop(); }, '--production'); });
import selftest, {Sandbox} from '../tool-testing/selftest.js'; selftest.define('standard-minifiers - CSS splitting', function (options) { const s = new Sandbox({ clients: options.clients }); s.createApp('myapp', 'minification-css-splitting'); s.cd('myapp'); s.testWithAllClients(function (run) { run.waitSecs(5); run.match('myapp'); run.match('proxy'); run.match('MongoDB'); run.match('your app'); run.match('running at'); run.match('localhost'); run.connectClient(); run.waitSecs(20); run.match('client connected'); run.match('the number of stylesheets: <2>'); run.match('the color of the tested 4097th property: <rgb(0, 128, 0)>'); s.append('client/lots-of-styles.main.styl', ` .class-4097 color: blue `); run.waitSecs(60); run.match('Client modified -- refreshing'); run.match('the number of stylesheets: <2>'); run.match('the color of the tested 4097th property: <rgb(0, 0, 255)>'); run.stop(); }, '--production'); });
Change DataType test to not null assertion
<?php class DataTypeTest extends PHPUnit_Framework_TestCase { public function testAutoload() { $this->assertInstanceOf('DataType', new DataType); } public function testImportDataType() { $xml = new SimpleXMLElement(' <DataType dtName="Test" dtHandle="test"/> '); $DataType = new DataType; $dataType = $DataType->import($xml); $this->assertNotNull($dataType->dtID); $this->assertEquals('Test', $dataType->dtName); $this->assertEquals('test', $dataType->dtHandle); } public function testImportDataTypeIncorrectElementName() { $xml = new SimpleXMLElement(' <Data dtName="Test" dtHandle="test"/> '); $DataType = new DataType; $this->setExpectedException('DataTypeException'); $DataType->import($xml); } }
<?php class DataTypeTest extends PHPUnit_Framework_TestCase { public function testAutoload() { $this->assertInstanceOf('DataType', new DataType); } public function testImportDataType() { $xml = new SimpleXMLElement(' <DataType dtName="Test" dtHandle="test"/> '); $DataType = new DataType; $dataType = $DataType->import($xml); $this->assertNotEmpty('dtID', $dataType); $this->assertEquals('Test', $dataType->dtName); $this->assertEquals('test', $dataType->dtHandle); } public function testImportDataTypeIncorrectElementName() { $xml = new SimpleXMLElement(' <Data dtName="Test" dtHandle="test"/> '); $DataType = new DataType; $this->setExpectedException('DataTypeException'); $DataType->import($xml); } }
Use more correct variable name.
const { isInteger } = require("lodash"); const render = require("./render"); const toPromise = require("./consumers/promise"); const toNodeStream = require("./consumers/node-stream"); class Renderer { constructor (vdomNode, sequence) { this.sequence = sequence || render(vdomNode); this.batchSize = 100; this.next = this.sequence.next.bind(this.sequence); } toPromise () { return toPromise(this.sequence, this.batchSize); } toStream () { return toNodeStream(this.sequence, this.batchSize); } tuneAsynchronicity (batchSize) { if (!isInteger(batchSize) || batchSize < 1) { throw new RangeError("Asynchronicity must be an integer greater than or equal to 1."); } this.batchSize = batchSize; return this; } } module.exports = Renderer;
const { isInteger } = require("lodash"); const render = require("./render"); const toPromise = require("./consumers/promise"); const toNodeStream = require("./consumers/node-stream"); class Renderer { constructor (jsx, sequence) { this.sequence = sequence || render(jsx); this.batchSize = 100; this.next = this.sequence.next.bind(this.sequence); } toPromise () { return toPromise(this.sequence, this.batchSize); } toStream () { return toNodeStream(this.sequence, this.batchSize); } tuneAsynchronicity (batchSize) { if (!isInteger(batchSize) || batchSize < 1) { throw new RangeError("Asynchronicity must be an integer greater than or equal to 1."); } this.batchSize = batchSize; return this; } } module.exports = Renderer;
Make the null value consistent between vertical and tabular output.
from .tabulate import _text_type def pad(field, total, char=u" "): return field + (char * (total - len(field))) def get_separator(num, header_len, data_len): sep = u"***************************[ %d. row ]***************************\n" % (num + 1) return sep def expanded_table(rows, headers): header_len = max([len(x) for x in headers]) max_row_len = 0 results = [] padded_headers = [pad(x, header_len) + u" |" for x in headers] header_len += 2 for row in rows: row_len = max([len(_text_type(x)) for x in row]) row_result = [] if row_len > max_row_len: max_row_len = row_len for header, value in zip(padded_headers, row): if value is None: value = '<null>' row_result.append(u"%s %s" % (header, value)) results.append('\n'.join(row_result)) output = [] for i, result in enumerate(results): output.append(get_separator(i, header_len, max_row_len)) output.append(result) output.append('\n') return ''.join(output)
from .tabulate import _text_type def pad(field, total, char=u" "): return field + (char * (total - len(field))) def get_separator(num, header_len, data_len): sep = u"***************************[ %d. row ]***************************\n" % (num + 1) return sep def expanded_table(rows, headers): header_len = max([len(x) for x in headers]) max_row_len = 0 results = [] padded_headers = [pad(x, header_len) + u" |" for x in headers] header_len += 2 for row in rows: row_len = max([len(_text_type(x)) for x in row]) row_result = [] if row_len > max_row_len: max_row_len = row_len for header, value in zip(padded_headers, row): row_result.append(u"%s %s" % (header, value)) results.append('\n'.join(row_result)) output = [] for i, result in enumerate(results): output.append(get_separator(i, header_len, max_row_len)) output.append(result) output.append('\n') return ''.join(output)