column   type           min  max
hash     stringlengths  40   40
diff     stringlengths  131  114k
message  stringlengths  7    980
project  stringlengths  5    67
split    stringclasses  1 value
b419c75ad3efb2218af1c59ed359f3f222f88b32
diff --git a/lib/pake/tasks/pakePhpExtensionTask.class.php b/lib/pake/tasks/pakePhpExtensionTask.class.php index <HASH>..<HASH> 100644 --- a/lib/pake/tasks/pakePhpExtensionTask.class.php +++ b/lib/pake/tasks/pakePhpExtensionTask.class.php @@ -10,12 +10,14 @@ class pakePhpExtensionTask { public static $tasks = array( - 'clean' => array('Clean all temporary files', array()), + 'clean' => array('Clean all temporary files', array('pakePhpExtensionTask::_clean_build', 'pakePhpExtensionTask::_clean_config')), 'configure' => array(null, array()), 'build' => array(null, array('pakePhpExtensionTask::configure')), - 'install' => array('configure, build and install extension', array('pakePhpExtensionTask::build')), - 'reconfigure' => array('reconfigure (needed if you change config.m4 file)', array('pakePhpExtensionTask::clean', 'pakePhpExtensionTask::configure')), + 'install' => array('configure, build and install extension. (options: --with-phpize, --with-php-config)', array('pakePhpExtensionTask::build')), + 'reconfigure' => array('reconfigure. needed if you change config.m4 file (options: --with-phpize, --with-php-config)', array('pakePhpExtensionTask::_clean_build', 'pakePhpExtensionTask::configure')), 'test' => array('run tests', array('pakePhpExtensionTask::build')), + '_clean_build' => array(null, array()), + '_clean_config' => array(null, array()), ); public static function import_default_tasks() @@ -28,13 +30,48 @@ class pakePhpExtensionTask } } - public static function run_configure() + public static function run_configure($task, $args, $long_args) { + $dir = dirname(pakeApp::get_instance()->getPakefilePath()); + $cfg_file = $dir.'/'.__CLASS__.'.yaml'; + + $need_to_write = true; + + if (isset($long_args['with-phpize'])) { + $phpize = $long_args['with-phpize']; + } elseif (file_exists($cfg_file)) { + $cfg_data = pakeYaml::loadFile($cfg_file); + $phpize = $cfg_data['phpize']; + $need_to_write = false; + } else { + $phpize = pake_which('phpize'); + } + + if (!file_exists($phpize)) + throw new pakeException('"'.$phpize.'" is not available'); + + if (isset($long_args['with-php-config'])) { + $php_config = $long_args['with-php-config']; + $need_to_write = true; + } elseif (isset($cfg_data)) { + $php_config = $cfg_data['php_config']; + } else { + $php_config = dirname($phpize).'/php-config'; + $need_to_write = true; + } + + if (!file_exists($php_config)) + throw new pakeException('"'.$php_config.'" is not available'); + if (!file_exists('configure')) - pake_sh('phpize'); + pake_sh(escapeshellarg($phpize)); if (!file_exists('Makefile')) { - pake_sh(realpath('configure')); + pake_sh(escapeshellarg(realpath('configure')).' '.escapeshellarg('--with-php-config='.$php_config)); + } + + if ($need_to_write) { + pakeYaml::emitFile(array('phpize' => $phpize, 'php_config' => $php_config), $cfg_file); } } @@ -50,13 +87,40 @@ class pakePhpExtensionTask pake_superuser_sh('make install'); } - public static function run_clean() + public static function run_clean() {} + public static function run__clean_build() { + $dir = dirname(pakeApp::get_instance()->getPakefilePath()); + $cfg_file = $dir.'/'.__CLASS__.'.yaml'; + if (file_exists('Makefile')) pake_sh('make distclean'); - if (file_exists('configure')) - pake_sh('phpize --clean'); + if (file_exists('configure')) { + if (isset($long_args['with-phpize'])) { + $phpize = $long_args['with-phpize']; + } elseif (file_exists($cfg_file)) { + $cfg_data = pakeYaml::loadFile($cfg_file); + $phpize = $cfg_data['phpize']; + } else { + $phpize = pake_which('phpize'); + } + + if (!file_exists($phpize)) + throw new pakeException('"'.$phpize.'" is not available'); + + pake_sh(escapeshellarg($phpize).' --clean'); + } + } + + public static function run__clean_config() + { + $dir = dirname(pakeApp::get_instance()->getPakefilePath()); + $cfg_file = $dir.'/'.__CLASS__.'.yaml'; + + if (file_exists($cfg_file)) { + pake_remove($cfg_file, ''); } } public static function run_test($task)
support for --with-phpize and --with-php-config options (to allow building extension against non-default php installation)
indeyets_pake
train
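The resolution order in this commit (explicit CLI option, then a cached YAML config file, then a PATH lookup, persisting the choice for later tasks) is a reusable pattern. Below is a minimal Python sketch of the same flow; the file layout, option names, and use of PyYAML are illustrative assumptions, not pake's actual API:

import os
import shutil
import yaml  # assumes PyYAML is available

def resolve_tool(name, cli_args, cfg_file):
    """Resolve a tool path: CLI flag, then cached config, then PATH."""
    need_to_write = True
    cfg = {}
    if os.path.exists(cfg_file):
        with open(cfg_file) as f:
            cfg = yaml.safe_load(f) or {}
    if 'with-' + name in cli_args:
        path = cli_args['with-' + name]
    elif name in cfg:
        path = cfg[name]
        need_to_write = False          # cached value, nothing to persist
    else:
        path = shutil.which(name)      # fall back to a PATH lookup
    if not path or not os.path.exists(path):
        raise RuntimeError('"%s" is not available' % (path or name))
    if need_to_write:                  # remember the choice for later tasks
        cfg[name] = path
        with open(cfg_file, 'w') as f:
            yaml.safe_dump(cfg, f)
    return path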
63b8451609d62e5a39c510d72d803834f19098ed
diff --git a/src/Kunstmaan/AdminBundle/Command/UpdateAclCommand.php b/src/Kunstmaan/AdminBundle/Command/UpdateAclCommand.php index <HASH>..<HASH> 100644 --- a/src/Kunstmaan/AdminBundle/Command/UpdateAclCommand.php +++ b/src/Kunstmaan/AdminBundle/Command/UpdateAclCommand.php @@ -46,7 +46,7 @@ class UpdateAclCommand extends ContainerAwareCommand // Select Permission(s) $permissionMap = $this->getContainer()->get('security.acl.permission.map'); - $question = new ChoiceQuestion('Select permissions(s) (seperate by ",")', + $question = new ChoiceQuestion('Select permissions(s) (separate by ",")', $permissionMap->getPossiblePermissions()); $question->setMultiselect(true); $mask = array_reduce($helper->ask($input, $output, $question), function ($a, $b) use ($permissionMap) {
Typo (#<I>)
Kunstmaan_KunstmaanBundlesCMS
train
06471a19594f121b7f82acfcb116854f0623ed33
diff --git a/src/Gzero/Api/Controller/Admin/UserController.php b/src/Gzero/Api/Controller/Admin/UserController.php index <HASH>..<HASH> 100644 --- a/src/Gzero/Api/Controller/Admin/UserController.php +++ b/src/Gzero/Api/Controller/Admin/UserController.php @@ -1,5 +1,11 @@ <?php namespace Gzero\Api\Controller\Admin; +use Gzero\Api\Controller\ApiController; +use Gzero\Repository\UserRepository; +use Gzero\Api\Transformer\UserTransformer; +use Gzero\Api\UrlParamsProcessor; +use Gzero\Api\Validator\UserValidator; + /** * This file is part of the GZERO CMS package. * @@ -7,10 +13,66 @@ * file that was distributed with this source code. * * Class UserController + * * @package Gzero\Api\Controller\Admin - * @author Adrian Skierniewski <adrian.skierniewski@gmail.com> - * @copyright Copyright (c) 2015, Adrian Skierniewski + * @author Mateusz Urbanowicz <urbanowiczmateusz89@gmail.com> + * @copyright Copyright (c) 2015, Mateusz Urbanowicz */ -class UserController { +class UserController extends ApiController { + + /** + * @var UserRepository + */ + protected $userRepo; + + /** + * ContentController constructor. + * + * @param UrlParamsProcessor $processor Url processor + * @param ContentRepository $content Content repository + * @param ContentValidator $validator Content validator + */ + public function __construct(UrlParamsProcessor $processor, UserRepository $content, UserValidator $validator) + { + parent::__construct($processor); + $this->validator = $validator->setData(\Input::all()); + $this->userRepo = $content; + } + + /** + * Display list of users + * + * @return \Illuminate\Http\JsonResponse + */ + public function index() + { + $input = $this->validator->validate('list'); + $params = $this->processor->process($input)->getProcessedFields(); + $results = $this->userRepo->retrieveUsers( + $params['filter'], + $params['orderBy'], + $params['page'], + $params['perPage'] + ); + return $this->respondWithSuccess($results, new UserTransformer); + } + + /** + * Display the specified resource. + * + * @param $id User ID + * + * @return Response + */ + public function show($id) + { + $user = $this->userRepo->retrieveById($id); + if (empty($user)) { + return $this->respondNotFound(); + } else { + return $this->respondWithSuccess($user, new UserTransformer); + } + } + } diff --git a/src/Gzero/Api/Validator/UserValidator.php b/src/Gzero/Api/Validator/UserValidator.php index <HASH>..<HASH> 100644 --- a/src/Gzero/Api/Validator/UserValidator.php +++ b/src/Gzero/Api/Validator/UserValidator.php @@ -1,16 +1,36 @@ <?php namespace Gzero\Api\Validator; +use Gzero\Validator\AbstractValidator; + /** * This file is part of the GZERO CMS package. * * For the full copyright and license information, please view the LICENSE * file that was distributed with this source code. * - * Class UserValidator + * Class ContentParamsValidator + * * @package Gzero\Api\Validator - * @author Adrian Skierniewski <adrian.skierniewski@gmail.com> - * @copyright Copyright (c) 2015, Adrian Skierniewski + * @author Adrian Skierniewski <urbanowiczmateusz89@gmail.com.com> + * @copyright Copyright (c) 2015, Mateusz Urbanowicz */ -class UserValidator { +class UserValidator extends AbstractValidator { + + /** + * @var array + */ + protected $rules = [ + 'list' => [ + 'page' => 'numeric', + 'perPage' => 'numeric', + 'sort' => '', + ], + ]; + /** + * @var array + */ + protected $filters = [ + 'title' => 'trim' + ]; } diff --git a/src/routes.php b/src/routes.php index <HASH>..<HASH> 100644 --- a/src/routes.php +++ b/src/routes.php @@ -27,6 +27,8 @@ Route::group( Route::resource('contents.uploads', 'Gzero\Api\Controller\Admin\UploadController'); // Uploads Route::resource('uploads', 'Gzero\Api\Controller\Admin\UploadController'); + // Users + Route::resource('users', 'Gzero\Api\Controller\Admin\UserController'); } ); }
KMS-<I> UserValidator and index user api action
GrupaZero_api
train
8ee7ad2209502f43496ff65e32fbcd7573b13c26
diff --git a/api/client/login.go b/api/client/login.go index <HASH>..<HASH> 100644 --- a/api/client/login.go +++ b/api/client/login.go @@ -11,7 +11,6 @@ import ( Cli "github.com/docker/docker/cli" flag "github.com/docker/docker/pkg/mflag" "github.com/docker/docker/pkg/term" - "github.com/docker/docker/registry" "github.com/docker/engine-api/client" "github.com/docker/engine-api/types" ) @@ -36,16 +35,7 @@ func (cli *DockerCli) CmdLogin(args ...string) error { cli.in = os.Stdin } - // The daemon `/info` endpoint informs us of the default registry being - // used. This is essential in cross-platforms environment, where for - // example a Linux client might be interacting with a Windows daemon, hence - // the default registry URL might be Windows specific. - serverAddress := registry.IndexServer - if info, err := cli.client.Info(); err != nil { - fmt.Fprintf(cli.out, "Warning: failed to get default registry endpoint from daemon (%v). Using system default: %s\n", err, serverAddress) - } else { - serverAddress = info.IndexServerAddress - } + serverAddress := cli.electAuthServer() if len(cmd.Args()) > 0 { serverAddress = cmd.Arg(0) } diff --git a/api/client/logout.go b/api/client/logout.go index <HASH>..<HASH> 100644 --- a/api/client/logout.go +++ b/api/client/logout.go @@ -5,7 +5,6 @@ import ( Cli "github.com/docker/docker/cli" flag "github.com/docker/docker/pkg/mflag" - "github.com/docker/docker/registry" ) // CmdLogout logs a user out from a Docker registry. @@ -14,12 +13,12 @@ import ( // // Usage: docker logout [SERVER] func (cli *DockerCli) CmdLogout(args ...string) error { - cmd := Cli.Subcmd("logout", []string{"[SERVER]"}, Cli.DockerCommands["logout"].Description+".\nIf no server is specified \""+registry.IndexServer+"\" is the default.", true) + cmd := Cli.Subcmd("logout", []string{"[SERVER]"}, Cli.DockerCommands["logout"].Description+".\nIf no server is specified, the default is defined by the daemon.", true) cmd.Require(flag.Max, 1) cmd.ParseFlags(args, true) - serverAddress := registry.IndexServer + serverAddress := cli.electAuthServer() if len(cmd.Args()) > 0 { serverAddress = cmd.Arg(0) } diff --git a/api/client/utils.go b/api/client/utils.go index <HASH>..<HASH> 100644 --- a/api/client/utils.go +++ b/api/client/utils.go @@ -21,6 +21,20 @@ import ( registrytypes "github.com/docker/engine-api/types/registry" ) +func (cli *DockerCli) electAuthServer() string { + // The daemon `/info` endpoint informs us of the default registry being + // used. This is essential in cross-platforms environment, where for + // example a Linux client might be interacting with a Windows daemon, hence + // the default registry URL might be Windows specific. + serverAddress := registry.IndexServer + if info, err := cli.client.Info(); err != nil { + fmt.Fprintf(cli.out, "Warning: failed to get default registry endpoint from daemon (%v). Using system default: %s\n", err, serverAddress) + } else { + serverAddress = info.IndexServerAddress + } + return serverAddress +} + // encodeAuthToBase64 serializes the auth configuration as JSON base64 payload func encodeAuthToBase64(authConfig types.AuthConfig) (string, error) { buf, err := json.Marshal(authConfig)
Enable cross-platforms logout from Registry
containers_storage
train
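The refactor above consolidates a "prefer the daemon's answer, fall back to a static default with a warning" lookup into one electAuthServer helper shared by login and logout. A rough Python analogue of that shape; get_daemon_info and the default address are hypothetical stand-ins:

import sys

DEFAULT_INDEX_SERVER = "https://index.docker.io/v1/"  # illustrative default

def elect_auth_server(get_daemon_info):
    """Ask the daemon for its default registry; fall back with a warning."""
    server_address = DEFAULT_INDEX_SERVER
    try:
        info = get_daemon_info()
        server_address = info["IndexServerAddress"]
    except Exception as err:
        print("Warning: failed to get default registry endpoint from daemon "
              "(%s). Using system default: %s" % (err, server_address),
              file=sys.stderr)
    return server_address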
871795d9cece15b0996da2b9854ce9c7fecc4379
diff --git a/benchexec/tools/smack.py b/benchexec/tools/smack.py index <HASH>..<HASH> 100644 --- a/benchexec/tools/smack.py +++ b/benchexec/tools/smack.py @@ -28,6 +28,7 @@ import re class Tool(benchexec.tools.template.BaseTool): REQUIRED_PATHS = [ + "boogie", "corral", "llvm", "lockpwn",
add missing required directory for SMACK
sosy-lab_benchexec
train
b17fc98a18a4469ad7d3ef1fe65111f589d47f8d
diff --git a/azurerm/internal/services/apimanagement/resource_arm_api_management_api_operation.go b/azurerm/internal/services/apimanagement/resource_arm_api_management_api_operation.go index <HASH>..<HASH> 100644 --- a/azurerm/internal/services/apimanagement/resource_arm_api_management_api_operation.go +++ b/azurerm/internal/services/apimanagement/resource_arm_api_management_api_operation.go @@ -245,7 +245,7 @@ func resourceArmApiManagementApiOperationDelete(d *schema.ResourceData, meta int } func expandApiManagementOperationRequestContract(input []interface{}) (*apimanagement.RequestContract, error) { - if len(input) == 0 { + if len(input) == 0 || input[0] == nil { return nil, nil }
azurerm_api_management_operation - will no longer panic on miss… (#<I>)
terraform-providers_terraform-provider-azurerm
train
eccede47c5c069bae752390342c78acdc7b48cc2
diff --git a/tests/Routing/RoutingUrlGeneratorTest.php b/tests/Routing/RoutingUrlGeneratorTest.php index <HASH>..<HASH> 100755 --- a/tests/Routing/RoutingUrlGeneratorTest.php +++ b/tests/Routing/RoutingUrlGeneratorTest.php @@ -4,6 +4,7 @@ namespace Illuminate\Tests\Routing; use Illuminate\Http\Request; use Illuminate\Routing\Route; +use InvalidArgumentException; use PHPUnit\Framework\TestCase; use Illuminate\Routing\UrlGenerator; use Illuminate\Routing\RouteCollection; @@ -547,6 +548,20 @@ class RoutingUrlGeneratorTest extends TestCase $this->assertEquals($url->to('/foo'), $url->previous('/foo')); } + + /** + * @expectedException InvalidArgumentException + * @expectedExceptionMessage Route [not_exists_route] not defined. + */ + public function testRouteNotDefinedException() + { + $url = new UrlGenerator( + $routes = new RouteCollection, + $request = Request::create('http://www.foo.com/') + ); + + $url->route('not_exists_route'); + } } class RoutableInterfaceStub implements UrlRoutable
[<I>] add test for `Route [$routeName] not defined.` exception (#<I>) - add test for `Route [$routeName] not defined.` exception since in <URL>
laravel_framework
train
17dd323f0c892619287269d7dce93a2305845122
diff --git a/src/Collection/Iterator/ExtractIterator.php b/src/Collection/Iterator/ExtractIterator.php index <HASH>..<HASH> 100644 --- a/src/Collection/Iterator/ExtractIterator.php +++ b/src/Collection/Iterator/ExtractIterator.php @@ -24,12 +24,12 @@ class ExtractIterator extends Collection { /** - * A path to follow inside a hierarchy in order to get a particular property, - * which name is the last in this array + * A callable responsible for extracting a single value for each + * item in the collection. * - * @var array + * @var callable */ - protected $_path; + protected $_extractor; /** * Creates the iterator that will return the requested property for each value @@ -53,7 +53,7 @@ class ExtractIterator extends Collection */ public function __construct($items, $path) { - $this->_path = explode('.', $path); + $this->_extractor = $this->_propertyExtractor($path); parent::__construct($items); } @@ -65,7 +65,7 @@ class ExtractIterator extends Collection */ public function current() { - $current = parent::current(); - return $this->_extract($current, $this->_path); + $extractor = $this->_extractor; + return $extractor(parent::current()); } } diff --git a/tests/TestCase/Collection/Iterator/ExtractIteratorTest.php b/tests/TestCase/Collection/Iterator/ExtractIteratorTest.php index <HASH>..<HASH> 100644 --- a/tests/TestCase/Collection/Iterator/ExtractIteratorTest.php +++ b/tests/TestCase/Collection/Iterator/ExtractIteratorTest.php @@ -83,4 +83,20 @@ class ExtractIteratorTest extends TestCase $extractor = new ExtractIterator($items, 'a.b.c'); $this->assertEquals([10, null, null, 25], iterator_to_array($extractor)); } + + /** + * Tests that it is possible to pass a callable as the extractor. + * + * @return void + */ + public function testExtractWithCallable() { + $items = [ + ['a' => 1, 'b' => 2], + ['a' => 3, 'b' => 4] + ]; + $extractor = new ExtractIterator($items, function ($item) { + return $item['b']; + }); + $this->assertEquals([2, 4], iterator_to_array($extractor)); + } }
Adding the ability of using a callable for Collection::extract()
cakephp_cakephp
train
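The ExtractIterator change swaps a stored path array for an extractor callable built once in the constructor, which is what lets callers pass their own callable. The same idea sketched in Python; property_extractor here only mirrors, and is not, CakePHP's helper:

from functools import reduce

def property_extractor(path):
    """Return an extractor callable: accept a callable as-is, or turn a
    dot-separated path like 'a.b.c' into a nested dictionary lookup."""
    if callable(path):
        return path
    parts = path.split('.')
    def extract(item):
        return reduce(lambda acc, key: None if acc is None else acc.get(key),
                      parts, item)
    return extract

items = [{'a': {'b': 1}}, {'a': {'b': 2}}]
print([property_extractor('a.b')(i) for i in items])                       # [1, 2]
print([property_extractor(lambda i: i['a']['b'] * 10)(i) for i in items]) # [10, 20]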
e225b7cc63e1d67f86e9d5a26b69d66470da659c
diff --git a/revolver/tool/memcached.py b/revolver/tool/memcached.py index <HASH>..<HASH> 100644 --- a/revolver/tool/memcached.py +++ b/revolver/tool/memcached.py @@ -4,7 +4,7 @@ from revolver import command from revolver import package def install(): - package.install('memcached') + package.install(["memcached", "libmemcached-dev"]) def ensure(): if command.exists('memcached'):
Install libmemcached in tool.memcached too
michaelcontento_revolver
train
0e195fd447ccb0fa08dd9c2b0229603602da6eea
diff --git a/provision/docker/docker.go b/provision/docker/docker.go index <HASH>..<HASH> 100644 --- a/provision/docker/docker.go +++ b/provision/docker/docker.go @@ -255,24 +255,20 @@ func (c *container) remove() error { err := dockerCluster().RemoveContainer(c.ID) if err != nil { log.Printf("Failed to remove container from docker: %s", err) - return err } runCmd("ssh-keygen", "-R", c.IP) log.Printf("Removing container %s from database", c.ID) coll := collection() defer coll.Database.Session.Close() if err := coll.RemoveId(c.ID); err != nil { - log.Printf("Failed to remove container from database: %s", err.Error()) - return err + log.Printf("Failed to remove container from database: %s", err) } r, err := getRouter() if err != nil { - log.Printf("Failed to obtain router: %s", err.Error()) - return err + log.Printf("Failed to obtain router: %s", err) } if err := r.RemoveRoute(c.AppName, address); err != nil { - log.Printf("Failed to remove route: %s", err.Error()) - return err + log.Printf("Failed to remove route: %s", err) } return nil } diff --git a/provision/docker/docker_test.go b/provision/docker/docker_test.go index <HASH>..<HASH> 100644 --- a/provision/docker/docker_test.go +++ b/provision/docker/docker_test.go @@ -207,6 +207,29 @@ func (s *S) TestContainerRemove(c *gocheck.C) { c.Assert(ok, gocheck.Equals, true) } +func (s *S) TestRemoveContainerIgnoreErrors(c *gocheck.C) { + fexec := &etesting.FakeExecutor{} + setExecut(fexec) + defer setExecut(nil) + err := s.newImage() + c.Assert(err, gocheck.IsNil) + container, err := s.newContainer() + c.Assert(err, gocheck.IsNil) + defer rtesting.FakeRouter.RemoveBackend(container.AppName) + client, _ := dockerClient.NewClient(s.server.URL()) + err = client.RemoveContainer(container.ID) + c.Assert(err, gocheck.IsNil) + err = container.remove() + c.Assert(err, gocheck.IsNil) + args := []string{"-R", container.IP} + c.Assert(fexec.ExecutedCmd("ssh-keygen", args), gocheck.Equals, true) + coll := s.conn.Collection(s.collName) + err = coll.FindId(container.ID).One(&container) + c.Assert(err, gocheck.NotNil) + c.Assert(err.Error(), gocheck.Equals, "not found") + c.Assert(rtesting.FakeRouter.HasRoute(container.AppName, container.getAddress()), gocheck.Equals, false) +} + func (s *S) TestContainerIP(c *gocheck.C) { err := s.newImage() c.Assert(err, gocheck.IsNil)
provision/docker: don't return error on container.remove. Now it works like app-remove. Aborting when the container does not exist in Docker may lead the system to an inconsistent state.
tsuru_tsuru
train
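The container.remove change makes teardown best-effort: each cleanup step logs its failure and continues, so a container already gone from Docker no longer leaves stale database and router entries. A schematic Python version; the three collaborator objects are hypothetical:

import logging

log = logging.getLogger(__name__)

def remove_container(container, docker, db, router):
    """Best-effort teardown: log each failure but keep cleaning up."""
    steps = [
        ("remove container from docker", lambda: docker.remove(container.id)),
        ("remove container from database", lambda: db.remove(container.id)),
        ("remove route", lambda: router.remove_route(container.app_name)),
    ]
    for description, step in steps:
        try:
            step()
        except Exception as err:
            log.warning("Failed to %s: %s", description, err)
    # always report success; partial failures were logged above
    return None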
9288ffcf042cfbd4d627abfaf2c8aa9ad8d1a05c
diff --git a/hazelcast-client/src/main/java/com/hazelcast/client/spi/impl/ClientClusterServiceImpl.java b/hazelcast-client/src/main/java/com/hazelcast/client/spi/impl/ClientClusterServiceImpl.java index <HASH>..<HASH> 100644 --- a/hazelcast-client/src/main/java/com/hazelcast/client/spi/impl/ClientClusterServiceImpl.java +++ b/hazelcast-client/src/main/java/com/hazelcast/client/spi/impl/ClientClusterServiceImpl.java @@ -385,6 +385,7 @@ public final class ClientClusterServiceImpl implements ClientClusterService { } catch (Exception e) { logger.severe("Error while connecting to cluster!", e); client.getLifecycleService().shutdown(); + latch.countDown(); return; } }
fix client blocking on latch if it fails to connect to the cluster
hazelcast_hazelcast
train
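The fix is the usual "release the latch on every exit path" rule: a thread waiting on the latch would block forever when the connect attempt failed before counting down. A Python analogue with threading.Event; connect_to_cluster and shutdown are placeholders (in Python, try/finally would be the idiomatic equivalent):

import threading

def connector(connect_to_cluster, done, shutdown):
    try:
        connect_to_cluster()
    except Exception as err:
        print("Error while connecting to cluster! %s" % err)
        shutdown()
        done.set()   # the fix: unblock whoever is waiting on the latch
        return
    done.set()

# a waiter elsewhere would do:
#   done = threading.Event(); start connector thread; done.wait()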
f7e9662e3f6a236aa1ea85faa41c29be77e454ba
diff --git a/package.json b/package.json index <HASH>..<HASH> 100644 --- a/package.json +++ b/package.json @@ -3,7 +3,7 @@ "lerna": "^2.5.1" }, "scripts": { - "start": "lerna run start ${PACKAGE:+--scope=@jpmorganchase/${PACKAGE}}", + "start": "lerna run start ${PACKAGE:+--scope=@jpmorganchase/${PACKAGE}} --stream", "puppeteer": "docker run -it --rm --shm-size=2g -u root -e WRITE_TESTS=${WRITE_TESTS} -v $(pwd):/src -w /src/packages/${PACKAGE} zenato/puppeteer ./node_modules/.bin/jest --runInBand", "postinstall": "lerna bootstrap --hoist", "test": "npm run test_perspective && npm run test_viewer && npm run test_hypergrid && npm run test_highcharts", diff --git a/packages/perspective-viewer-hypergrid/src/js/fixes.js b/packages/perspective-viewer-hypergrid/src/js/fixes.js index <HASH>..<HASH> 100644 --- a/packages/perspective-viewer-hypergrid/src/js/fixes.js +++ b/packages/perspective-viewer-hypergrid/src/js/fixes.js @@ -56,7 +56,7 @@ export function GridUIFixPlugin(grid) { let range = this.component.grid.getVisibleRows(); let s = range[1]; let e = range[range.length - 1]; - if (range.length > 1 && this.dirty && (this.__cached_start !== s || this.__cached_end !== e)) { + if (this.component.grid._cache_update && range.length > 1 && this.dirty && (this.__cached_start !== s || this.__cached_end !== e)) { if (this._updating_cache) { this._updating_cache.cancel(); } diff --git a/packages/perspective-viewer-hypergrid/src/js/hypergrid.js b/packages/perspective-viewer-hypergrid/src/js/hypergrid.js index <HASH>..<HASH> 100644 --- a/packages/perspective-viewer-hypergrid/src/js/hypergrid.js +++ b/packages/perspective-viewer-hypergrid/src/js/hypergrid.js @@ -146,9 +146,11 @@ function setPSP(payload) { col_settings['type'] = payload.columnTypes[i] === 'str' ? 'string' : payload.columnTypes[i]; processed_schema.push(col_settings); } + var old_schema = this.grid.behavior.subgrids.lookup.data.schema; this.schema_loaded = this.schema_loaded && _.isEqual(processed_schema, old_schema); this.schema = processed_schema; + if (this.schema_loaded) { this.grid.setData({ data: payload.rows, @@ -169,7 +171,7 @@ function setPSP(payload) { } } console.log('Setting up initial schema and data load into HyperGrid'); - this.grid.behavior.setData({ + this.grid.setData({ data: payload.rows, schema: this.schema }); @@ -279,7 +281,7 @@ function PerspectiveDataModel(grid) { // Override setData setData: function (dataPayload, schema) { this.viewData = dataPayload; - this.source.setData(dataPayload, schema); + this.source.setData(dataPayload, schema); }, // Is the grid view a tree @@ -422,7 +424,7 @@ var conv = { 'date': 'date' } -function psp2hypergrid(data, schema) { +function psp2hypergrid(data, schema, start = 0, end = undefined, length = undefined) { if (data.length === 0) { return { rowPaths: [], @@ -443,7 +445,10 @@ function psp2hypergrid(data, schema) { let flat_columns = columnPaths.map(col => col.join(",")); let rows = []; - for (let idx = 0; idx < data.length; idx++) { + if (length) { + rows.length = length; + } + for (let idx = start; idx < (end || data.length); idx++) { const row = data[idx] || {}; let new_row = []; let row_path = []; @@ -461,11 +466,11 @@ function psp2hypergrid(data, schema) { for (var col of flat_columns) { new_row.push(row[col]); } - rows.push({ + rows[idx] ={ rowPath: row_path, rowData: new_row, - rowLeaf: row_leaf - }); + isLeaf: row_leaf + }; } var hg_data = { @@ -610,8 +615,10 @@ async function grid(div, view, hidden) { div.appendChild(this.grid); } this.grid.grid._cache_update = async (s, e) => { - json = await fill_page(view, json, hidden, s, e + 1); - this.grid.set_data(json, schema); + json = await fill_page(view, json, hidden, s, e + 10); + let rows = psp2hypergrid(json, schema, s, Math.min(e + 10, nrows), nrows).rows; + rows[0] = this.grid.grid.behavior.dataModel.viewData[0]; + this.grid.grid.setData({data: rows}); } if (visible_rows.length > 0) { this.grid.set_data(json, schema);
Reduced iteration per page to only visible rows.
finos_perspective
train
731b19b3fe22b5d83059e0ad95032f563cee01f5
diff --git a/core/src/main/java/hudson/matrix/MatrixProject.java b/core/src/main/java/hudson/matrix/MatrixProject.java index <HASH>..<HASH> 100644 --- a/core/src/main/java/hudson/matrix/MatrixProject.java +++ b/core/src/main/java/hudson/matrix/MatrixProject.java @@ -40,6 +40,7 @@ import hudson.model.Items; import hudson.model.JDK; import hudson.model.Job; import hudson.model.Label; +import hudson.model.Node; import hudson.model.Queue.FlyweightTask; import hudson.model.ResourceController; import hudson.model.Result; @@ -55,6 +56,8 @@ import hudson.tasks.Publisher; import hudson.triggers.Trigger; import hudson.util.CopyOnWriteMap; import hudson.util.DescribableList; +import hudson.util.FormValidation; +import hudson.util.FormValidation.Kind; import net.sf.json.JSONObject; import org.kohsuke.stapler.HttpResponse; import org.kohsuke.stapler.StaplerRequest; @@ -93,7 +96,7 @@ public class MatrixProject extends AbstractProject<MatrixProject,MatrixBuild> im private volatile AxisList axes = new AxisList(); /** - * The filter that is applied to combinatios. It is a Groovy if condition. + * The filter that is applied to combinations. It is a Groovy if condition. * This can be null, which means "true". * * @see #getCombinationFilter() @@ -617,7 +620,10 @@ public class MatrixProject extends AbstractProject<MatrixProject,MatrixBuild> im private void checkAxisNames(Iterable<Axis> newAxes) throws FormException { HashSet<String> axisNames = new HashSet<String>(); for (Axis a : newAxes) { - a.getDescriptor().doCheckName(a.getName()); + FormValidation fv = a.getDescriptor().doCheckName(a.getName()); + if (fv.kind!=Kind.OK) + throw new FormException(Messages.MatrixProject_DuplicateAxisName(),fv,"axis.name"); + if (axisNames.contains(a.getName())) throw new FormException(Messages.MatrixProject_DuplicateAxisName(),"axis.name"); axisNames.add(a.getName());
FormValidation error needs to be checked
jenkinsci_jenkins
train
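The Jenkins fix stops discarding the return value of doCheckName: form validation there reports problems through a result object rather than by throwing, so the caller has to inspect its kind. A Python sketch of that contract; this FormValidation is a stand-in, not Jenkins' class:

from dataclasses import dataclass

@dataclass
class FormValidation:
    kind: str          # "OK", "WARNING" or "ERROR"
    message: str = ""

def check_axis_names(axes, check_name):
    seen = set()
    for axis in axes:
        fv = check_name(axis)          # returns FormValidation, never raises
        if fv.kind != "OK":
            raise ValueError("invalid axis name %r: %s" % (axis, fv.message))
        if axis in seen:
            raise ValueError("duplicate axis name %r" % axis)
        seen.add(axis)

check_axis_names(["AXIS1", "AXIS2"], lambda a: FormValidation("OK"))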
2f7fc506ea683b5d79ee23201561a5658597285d
diff --git a/src/playbacks/html5_video/html5_video.js b/src/playbacks/html5_video/html5_video.js index <HASH>..<HASH> 100644 --- a/src/playbacks/html5_video/html5_video.js +++ b/src/playbacks/html5_video/html5_video.js @@ -359,18 +359,20 @@ export default class HTML5Video extends Playback { if (!this.el.buffered.length) { return } - var bufferedPos = 0 - for (var i = 0; i < this.el.buffered.length; i++) { - if (this.el.currentTime >= this.el.buffered.start(i) && this.el.currentTime <= this.el.buffered.end(i)) { + let buffered = [] + let bufferedPos = 0 + for (let i = 0; i < this.el.buffered.length; i++) { + buffered = [...buffered, {start: this.el.buffered.start(i), end: this.el.buffered.end(i)}] + if (this.el.currentTime >= buffered[i].start && this.el.currentTime <= buffered[i].end) { bufferedPos = i - break } } - this.trigger(Events.PLAYBACK_PROGRESS, { - start: this.el.buffered.start(bufferedPos), - current: this.el.buffered.end(bufferedPos), + const progress = { + start: buffered[bufferedPos].start, + current: buffered[bufferedPos].end, total: this.el.duration - }) + } + this.trigger(Events.PLAYBACK_PROGRESS, progress, buffered) } _typeFor(src) { diff --git a/test/playbacks/html5_video_spec.js b/test/playbacks/html5_video_spec.js index <HASH>..<HASH> 100644 --- a/test/playbacks/html5_video_spec.js +++ b/test/playbacks/html5_video_spec.js @@ -75,6 +75,61 @@ describe('HTML5Video playback', function() { expect(playback.el.controls).to.be.true }) + describe('progress', function() { + let start, end, currentTime + const duration = 300 + beforeEach(function() { + this.playback = new HTML5Video(this.options) + currentTime = 0 + start = [0] + end = [30] + let fakeEl = { + get currentTime() { return currentTime }, + get duration() { return duration }, + get buffered() { return {start: (i) => start[i], end: (i) => end[i], get length() { return start.length }} } + } + this.playback.setElement(fakeEl) + }) + + it('should trigger PLAYBACK_PROGRESS with current buffer position', function() { + let progress + this.playback.on(Events.PLAYBACK_PROGRESS, function(currentProgress) { + progress = currentProgress + }) + this.playback._onProgress() // cannot trigger event on fake element (improve later?) + expect(progress.start).to.be.equal(start[0]) + expect(progress.current).to.be.equal(end[0]) + expect(progress.total).to.be.equal(duration) + }) + + it('should find current buffer position', function() { + start = [0, 50, 180] + end = [30, 90, 280] + currentTime = 75 // this should be located at index 1 + let progress + this.playback.on(Events.PLAYBACK_PROGRESS, function(currentProgress) { + progress = currentProgress + }) + this.playback._onProgress() // cannot trigger event on fake element (improve later?) + expect(progress.start).to.be.equal(start[1]) + expect(progress.current).to.be.equal(end[1]) + }) + + it('should return an array of buffer segments as {start, end} objects', function() { + start = [0, 50, 180] + end = [30, 90, 280] + let buffered + this.playback.on(Events.PLAYBACK_PROGRESS, function(currentProgress, bufferedSegments) { + buffered = bufferedSegments + }) + this.playback._onProgress() // cannot trigger event on fake element (improve later?) + expect(buffered.length).to.be.equal(start.length) + expect(buffered[0]).to.deep.equal({start: start[0], end: end[0]}) + expect(buffered[1]).to.deep.equal({start: start[1], end: end[1]}) + expect(buffered[2]).to.deep.equal({start: start[2], end: end[2]}) + }) + }) + describe('audio resources', function() { it('should be able to play audio resources', function() { expect(HTML5Video.canPlay('http://domain.com/Audio.oga')).to.be.true
html5 video: add buffered segments to progress event
clappr_clappr
train
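The playback patch collects every buffered range into {start, end} objects and then picks the segment containing the playhead, instead of stopping at the first range. The same computation in Python, reusing the test's numbers; plain tuples stand in for the media element's TimeRanges:

def on_progress(buffered, current_time, duration):
    """buffered: list of (start, end) pairs, as reported by a media element."""
    segments = [{"start": s, "end": e} for s, e in buffered]
    if not segments:
        return None, []
    pos = 0
    for i, seg in enumerate(segments):
        if seg["start"] <= current_time <= seg["end"]:
            pos = i                      # no early break: keep every segment
    progress = {"start": segments[pos]["start"],
                "current": segments[pos]["end"],
                "total": duration}
    return progress, segments

prog, segs = on_progress([(0, 30), (50, 90), (180, 280)], 75, 300)
print(prog)       # {'start': 50, 'current': 90, 'total': 300}
print(len(segs))  # 3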
8c225baeb2f1c657d22c87db93435751164ddf8a
diff --git a/lib/assets/FastfileTemplate b/lib/assets/FastfileTemplate index <HASH>..<HASH> 100644 --- a/lib/assets/FastfileTemplate +++ b/lib/assets/FastfileTemplate @@ -27,6 +27,7 @@ platform :ios do # ENV["SLACK_URL"] = "https://hooks.slack.com/services/..." cocoapods + carthage # increment_build_number diff --git a/lib/fastlane/setup.rb b/lib/fastlane/setup.rb index <HASH>..<HASH> 100644 --- a/lib/fastlane/setup.rb +++ b/lib/fastlane/setup.rb @@ -88,6 +88,7 @@ module Fastlane @tools[:snapshot] = File.exist?(File.join(folder, 'Snapfile')) @tools[:xctool] = File.exist?(File.join(File.expand_path('..', folder), '.xctool-args')) @tools[:cocoapods] = File.exist?(File.join(File.expand_path('..', folder), 'Podfile')) + @tools[:carthage] = File.exist?(File.join(File.expand_path('..', folder), 'Cartfile')) @tools[:sigh] = false end @@ -137,7 +138,8 @@ module Fastlane template.gsub!('snapshot', '# snapshot') unless @tools[:snapshot] template.gsub!('sigh', '# sigh') unless @tools[:sigh] template.gsub!('xctool', '# xctool') unless @tools[:xctool] - template.gsub!('cocoapods', '# cocoapods') unless @tools[:cocoapods] + template.gsub!('cocoapods', '') unless @tools[:cocoapods] + template.gsub!('carthage', '') unless @tools[:carthage] template.gsub!('[[FASTLANE_VERSION]]', Fastlane::VERSION) @tools.each do |key, value| diff --git a/spec/setup_spec.rb b/spec/setup_spec.rb index <HASH>..<HASH> 100644 --- a/spec/setup_spec.rb +++ b/spec/setup_spec.rb @@ -28,7 +28,7 @@ describe Fastlane do Fastlane::FastlaneFolder.create_folder!(workspace) setup = Fastlane::Setup.new expect(setup.run).to eq(true) - expect(setup.tools).to eq({deliver: true, snapshot: true, xctool: true, cocoapods: true, sigh: true}) + expect(setup.tools).to eq({deliver: true, snapshot: true, xctool: true, cocoapods: true, sigh: true, carthage: false}) content = File.read(File.join(Fastlane::FastlaneFolder.path, 'Fastfile')) expect(content).to include "# update_fastlane"
Added carthage to fastlane by default if already used
fastlane_fastlane
train
edc3354697491f5d7b2f76e3306b20b9c932723c
diff --git a/lib/config.js b/lib/config.js index <HASH>..<HASH> 100644 --- a/lib/config.js +++ b/lib/config.js @@ -220,7 +220,7 @@ const normalizePluginHelpers = (items, subCfg) => { * templates: {'*': {'javascripts/app.js': checker2}} * } */ -const createJoinConfig = configFiles => { +const createJoinConfig = (configFiles, paths) => { const types = Object.keys(configFiles); checkFilesKeys(configFiles); @@ -258,6 +258,10 @@ const createJoinConfig = configFiles => { } Object.keys(fileCfg.entryPoints).forEach(target => { + const isTargetWatched = paths.watched.some(path => target.indexOf(path + '/') === 0); + if (!isTargetWatched) { + logger.warn(`The correct use of entry points is: \`'entryFile.js': 'outputFile.js'\`. You are trying to use '${target}' as an entry point, but it is probably an output file.`); + } const outFiles = Object.keys(fileCfg.entryPoints[target]); if (outFiles.some(out => joinConfig[type][out])) { logger.warn(`config.files.${type}.joinTo is already defined for '${target}', can't add an entry point`); @@ -421,7 +425,7 @@ const warnAboutConfigDeprecations = config => { const normalizeConfig = config => { const normalized = {}; - normalized.join = createJoinConfig(config.files); + normalized.join = createJoinConfig(config.files, config.paths); const mod = config.modules; normalized.modules = {}; normalized.modules.wrapper = mdls.normalizeWrapper(mod.wrapper, config.modules.nameCleaner);
Print a warning if 'entryPoints' key-values are flipped
brunch_brunch
train
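The new warning catches flipped key/value pairs by checking that each entry-point key sits under a watched source directory, since an output path would not. A minimal Python rendering of the check; the surrounding types and the sample call are assumptions:

def warn_on_flipped_entry_points(entry_points, watched):
    """entry_points: {source_file: {...}}; watched: list of source roots."""
    for target in entry_points:
        is_watched = any(target.startswith(path + "/") for path in watched)
        if not is_watched:
            print("warning: the correct use of entry points is "
                  "'entryFile.js': 'outputFile.js'. %r looks like an "
                  "output file, not an entry point." % target)

warn_on_flipped_entry_points({"public/app.js": {}}, watched=["app", "vendor"])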
23d60e1a1112a60fa80ca62002f80fbc7a1d7922
diff --git a/config/projects/chefdk.rb b/config/projects/chefdk.rb index <HASH>..<HASH> 100644 --- a/config/projects/chefdk.rb +++ b/config/projects/chefdk.rb @@ -23,7 +23,7 @@ install_path "/opt/chefdk" build_version Omnibus::BuildVersion.full build_iteration 4 -override :berkshelf, version: "3.0.0.beta7" +override :berkshelf, version: "v3.0.0.beta7" override :bundler, version: "1.5.2" override :libedit, version: "20130712-3.1" override :libtool, version: "2.4.2"
Set berks version to git tag. berks' version tags have a "v" at the front.
chef_chef
train
d9f8aea726758bfb8f29580e52800ba7b35d3203
diff --git a/gruntfile.js b/gruntfile.js index <HASH>..<HASH> 100644 --- a/gruntfile.js +++ b/gruntfile.js @@ -10,7 +10,8 @@ module.exports = function(grunt) { csscss: { options: { verbose: true, - outputJson: false + outputJson: false, + minMatch: 2 }, dist: { src: ['test/example/style.css', 'test/example/another.css'] diff --git a/tasks/csscss.js b/tasks/csscss.js index <HASH>..<HASH> 100644 --- a/tasks/csscss.js +++ b/tasks/csscss.js @@ -32,6 +32,14 @@ module.exports = function(grunt) { } /** + * Checks to see if the minimum match argument should be enforced. + */ + if (options.minMatch) { + args.push('-n') + args.push(options.minMatch); + } + + /** * adds path to file, to be analysed, as an argument. */ args.push(f); diff --git a/test/example/style.css b/test/example/style.css index <HASH>..<HASH> 100644 --- a/test/example/style.css +++ b/test/example/style.css @@ -9,3 +9,8 @@ margin: 0; padding: 0; } + +.rule-c { + border: 1px solid #000; + margin: 0; +}
added support for the minimum match argument, which will ignore any rulesets that have fewer matches.
peterkeating_grunt-csscss
train
07e3ca48fe193e7c1dd4c258f5a038b73088a4da
diff --git a/plaso/formatters/shutdown.py b/plaso/formatters/shutdown.py index <HASH>..<HASH> 100644 --- a/plaso/formatters/shutdown.py +++ b/plaso/formatters/shutdown.py @@ -5,7 +5,6 @@ from __future__ import unicode_literals from plaso.formatters import interface from plaso.formatters import manager -from plaso.lib import errors class ShutdownWindowsRegistryEventFormatter( @@ -24,36 +23,6 @@ class ShutdownWindowsRegistryEventFormatter( SOURCE_LONG = 'Registry Key Shutdown Entry' SOURCE_SHORT = 'REG' - # pylint: disable=unused-argument - def GetMessages(self, formatter_mediator, event_data): - """Determines the formatted message strings for the event data. - - Args: - formatter_mediator (FormatterMediator): mediates the interactions - between formatters and other components, such as storage and Windows - EventLog resources. - event_data (EventData): event data. - - Returns: - tuple(str, str): formatted message string and short message string. - - Raises: - WrongFormatter: if the event data cannot be formatted by the formatter. - """ - if self.DATA_TYPE != event_data.data_type: - raise errors.WrongFormatter('Unsupported data type: {0:s}.'.format( - event_data.data_type)) - - event_values = event_data.CopyToDict() - - regvalue = event_values.get('regvalue', {}) - string_parts = [] - for key, value in sorted(regvalue.items()): - string_parts.append('{0:s}: {1!s}'.format(key, value)) - event_values['text'] = ' '.join(string_parts) - - return self._ConditionalFormatMessages(event_values) - manager.FormattersManager.RegisterFormatter( ShutdownWindowsRegistryEventFormatter)
Removed regvalue from shutdown formatter #<I> (#<I>)
log2timeline_plaso
train
ea295fcc26638b1cec093c10a36f5b0d3bf72f9a
diff --git a/lib/vagrant-mutate/converter.rb b/lib/vagrant-mutate/converter.rb index <HASH>..<HASH> 100644 --- a/lib/vagrant-mutate/converter.rb +++ b/lib/vagrant-mutate/converter.rb @@ -49,7 +49,7 @@ module VagrantMutate @logger.info "qemu-img info output\n#{info}" if info =~ /(\d+) bytes/ size_in_gb = $1.to_i / (1024 * 1024 * 1024) - return "#{size_in_gb}G" + return size_in_gb else raise Errors::DetermineImageSizeFailed end
Virtual size should be number, not string with unit
sciurus_vagrant-mutate
train
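The fix returns the size as a bare number instead of a "40G"-style string; the parsing itself is a one-regex job. In Python it might look like this (the sample qemu-img output line is illustrative):

import re

def virtual_size_gb(qemu_img_info):
    """Extract the virtual size in whole gigabytes from `qemu-img info` output."""
    match = re.search(r"(\d+) bytes", qemu_img_info)
    if not match:
        raise ValueError("could not determine image size")
    return int(match.group(1)) // (1024 * 1024 * 1024)  # a number, not '40G'

print(virtual_size_gb("virtual size: 40G (42949672960 bytes)"))  # 40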
f36ffda683aef6a7eaad8fa08db9148e29902d6e
diff --git a/service.js b/service.js index <HASH>..<HASH> 100755 --- a/service.js +++ b/service.js @@ -225,14 +225,15 @@ class ServiceConsul extends service.Service { // TODO where does baseUrl come from ? delete this.consulOptions.baseUrl; - // TODO repeat here ? - try { - this.consul = require('consul')(this.consulOptions); - } catch (e) { - this.error(`Unable to create consul connection ${e}`); + return modified; + } + + get consul() { + if (!this._consul) { + this._consul = require('consul')(this.consulOptions); } - return modified; + return this._consul; } get serviceDefinition() { @@ -274,48 +275,48 @@ class ServiceConsul extends service.Service { _start() { return super._start().then(() => { this.updateTags(); - // wait until health-check and koa services are present return ServiceConsumerMixin.defineServiceConsumerProperties(this, { - listener: { - name: 'koa-admin', - type: 'koa' - }, - hcs: { - name: 'health-check', - type: 'health-check' - } - }, this.owner, true).then(() => - this.listener.start().then(() => - /* PromiseRepeat( - () =>*/ - this.consul.agent.service.register(this.serviceDefinition).then(fullfilled => { - this._stepRegisteredListener = step => { - this.updateTags(); - this.update(5000); - }; - - this.owner.addListener('stepRegistered', this._stepRegisteredListener); - - this.listener.koa.use(route.get(this.checkPath, ctx => - this.hcs.endpoints.state.receive({}).then(r => { - this.status = r ? 200 : 300; - ctx.body = r ? 'OK' : 'ERROR'; + listener: { + name: 'koa-admin', + type: 'koa' + }, + hcs: { + name: 'health-check', + type: 'health-check' + } + }, this.owner, true) + .then(() => + this.listener.start().then(() => + PromiseRepeat( + () => + this.consul.agent.service.register(this.serviceDefinition).then(fullfilled => { + this._stepRegisteredListener = step => { + this.updateTags(); + this.update(5000); + }; + + this.owner.addListener('stepRegistered', this._stepRegisteredListener); + + this.listener.koa.use(route.get(this.checkPath, ctx => + this.hcs.endpoints.state.receive({}).then(r => { + this.status = r ? 200 : 300; + ctx.body = r ? 'OK' : 'ERROR'; + }) + )); + + return Promise.resolve(); + }), { + maxAttempts: 5, + minTimeout: 1000, + maxTimeout: this.startTimeout * 1000, + throttle: 1000 }) - )); - - return Promise.resolve(); - }), { - maxAttempts: 5, - minTimeout: 1000, - maxTimeout: this.startTimeout * 1000, - throttle: 1000 - }) - // ) - ); + )); }); } + /** * deregister the service from consul * @return {Promise} that fullfills when the deregitering has finished diff --git a/tests/service_test.js b/tests/service_test.js index <HASH>..<HASH> 100644 --- a/tests/service_test.js +++ b/tests/service_test.js @@ -20,7 +20,7 @@ describe('consul service', function () { }, { logLevel: 'trace', name: 'consul', - port: 4713, + //port: 4713, checkInterval: 100 }], [ServiceConsul, require('kronos-service-health-check'), require('kronos-service-koa')]).then( manager => {
fix: move agent connection out of _configure() method
Kronos-Integration_kronos-service-consul
train
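Moving the consul client construction out of _configure() and behind a getter makes the connection lazy: it is created on first use, after all options are final. The equivalent Python idiom is a cached property; the dict factory below is just a runnable stand-in for a real client constructor:

from functools import cached_property

class ConsulService:
    def __init__(self, options, client_factory):
        self.consul_options = options
        self._client_factory = client_factory  # e.g. a real consul client class

    @cached_property
    def consul(self):
        # created on first access, once configuration has settled
        return self._client_factory(**self.consul_options)

svc = ConsulService({"host": "127.0.0.1", "port": 8500}, client_factory=dict)
print(svc.consul is svc.consul)  # True: a single lazily created instance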
6b4741818ef9b9be9069b41ead2e019c4410fb26
diff --git a/shared/fetcher.js b/shared/fetcher.js index <HASH>..<HASH> 100644 --- a/shared/fetcher.js +++ b/shared/fetcher.js @@ -170,27 +170,27 @@ Fetcher.prototype.isMissingKeys = function(modelData, keys) { }; Fetcher.prototype.fetchFromApi = function(spec, options, callback) { - var model = this.getModelOrCollectionForSpec(spec, null, options), - fetcher = this; - - model.fetch({ - headers: options.headers || {}, - timeout: options.timeout || 0, - data: spec.params, - success: function(model, body) { - callback(null, model); - }, - error: function(model, resp, options) { - var body, respOutput, err; - - body = resp.body; - resp.body = typeof body === 'string' ? body.slice(0, 150) : body; - respOutput = JSON.stringify(resp); - err = new Error("ERROR fetching model '" + fetcher.modelUtils.modelName(model.constructor) + "' with options '" + JSON.stringify(options) + "'. Response: " + respOutput); - err.status = resp.status; - err.body = body; - callback(err); - } + var fetcher = this; + this.getModelOrCollectionForSpec(spec, null, options, function(model) { + model.fetch({ + headers: options.headers || {}, + timeout: options.timeout || 0, + data: spec.params, + success: function(model, body) { + callback(null, model); + }, + error: function(model, resp, options) { + var body, respOutput, err; + + body = resp.body; + resp.body = typeof body === 'string' ? body.slice(0, 150) : body; + respOutput = JSON.stringify(resp); + err = new Error("ERROR fetching model '" + fetcher.modelUtils.modelName(model.constructor) + "' with options '" + JSON.stringify(options) + "'. Response: " + respOutput); + err.status = resp.status; + err.body = body; + callback(err); + } + }); }); }; diff --git a/test/shared/fetcher.test.js b/test/shared/fetcher.test.js index <HASH>..<HASH> 100644 --- a/test/shared/fetcher.test.js +++ b/test/shared/fetcher.test.js @@ -498,7 +498,7 @@ describe('fetcher', function() { done(); }); fetcher.pendingFetches.should.eql(1); - }); + }); it("should be able to fetch both a model and a collection at the same time", function(done) { var fetchSpec; @@ -829,4 +829,31 @@ describe('fetcher', function() { expect(result.options.params).to.deep.equal(params); }); }); + + describe('fetchFromApi', function(done) { + var spec, options, callbackSpy, modelMock; + + beforeEach(function () { + spec = { model: 'SomeModel' }; + options = {readFromCache: false}; + callbackSpy = sinon.spy(); + modelMock = {fetch: sinon.spy()}; + }); + + it('should call the getModelOrCollectionForSpec with callback', function (done) { + var lastCall, getModelOrCollectionForSpecStub = sinon.stub(fetcher, 'getModelOrCollectionForSpec'); + getModelOrCollectionForSpecStub.callsArgWith(3, modelMock); + + fetcher.fetchFromApi(spec, options, callbackSpy); + + callbackSpy.should.have.not.been.called; + + getModelOrCollectionForSpecStub.should.have.been.calledOnce; + getModelOrCollectionForSpecStub.should.have.been.calledWith(spec, null, options); + + modelMock.fetch.should.have.been.calledOnce; + + done(); + }); + }); });
Fixes fetchFromApi to work in amd (async) environments
rendrjs_rendr
train
b4215603cf7f491c7774dc2c83f51829a30c6d74
diff --git a/jquery-scrolltofixed.js b/jquery-scrolltofixed.js index <HASH>..<HASH> 100644 --- a/jquery-scrolltofixed.js +++ b/jquery-scrolltofixed.js @@ -147,7 +147,7 @@ 'bottom' : base.options.bottom == -1?'':base.options.bottom, 'margin-left' : '0px' } - if (!base.options.dontSetWidth){ cssOptions['width']=target.width(); }; + if (!base.options.dontSetWidth){ cssOptions['width']=target.css('width'); }; target.css(cssOptions); @@ -178,7 +178,7 @@ 'margin-left' : '0px', 'bottom' : '' } - if (!base.options.dontSetWidth){ cssOptions['width']=target.width(); }; + if (!base.options.dontSetWidth){ cssOptions['width']=target.css('width'); }; target.css(cssOptions);
Make setWidth work with border-box
bigspotteddog_ScrollToFixed
train
2947f7856b3a30c0f5ea5c37193b997a8e564fe3
diff --git a/khayyam/constants.py b/khayyam/constants.py index <HASH>..<HASH> 100644 --- a/khayyam/constants.py +++ b/khayyam/constants.py @@ -142,10 +142,10 @@ AM_PM_REGEX = u'(%s)' % u'|'.join(AM_PM.values()) AM_PM_ASCII_REGEX = '([aA][mM]|[pP][mM])' -HOUR12_REGEX = '([0]?[1-9]|1[0-2])' # TODO: Precisest pattern -HOUR24_REGEX = '\d{1,2}' # TODO: Precisest pattern -MINUTE_REGEX = '\d{1,2}' # TODO: Precisest pattern -SECOND_REGEX = '\d{1,2}' # TODO: Precisest pattern +HOUR12_REGEX = '([0]?[1-9]|1[0-2])' +HOUR24_REGEX = '([0]?\d|1\d|2[0-3])' +MINUTE_REGEX = '([0]?\d|[1-5]\d)' +SECOND_REGEX = '([0]?\d|[1-5]\d)' MICROSECOND_REGEX = '\d{1,6}' UTC_OFFSET_FORMAT_REGEX = '([-+]?\d{2}:\d{2}|)' TZ_NAME_FORMAT_REGEX='.+' diff --git a/khayyam/jalali_date.py b/khayyam/jalali_date.py index <HASH>..<HASH> 100644 --- a/khayyam/jalali_date.py +++ b/khayyam/jalali_date.py @@ -21,7 +21,6 @@ from khayyam.formatting import JalaliDateFormatter __author__ = 'vahid' -# TODO: replace(*) method for this class class JalaliDate(object): """ Representing the Jalali Date, without the time data. @@ -137,6 +136,7 @@ class JalaliDate(object): return JalaliDate(self.year, self.month, self.day) def replace(self, year=None, month=None, day=None): + year, month, day = self._validate( year if year else self.year, month if month else self.month, diff --git a/khayyam/jalali_datetime.py b/khayyam/jalali_datetime.py index <HASH>..<HASH> 100644 --- a/khayyam/jalali_datetime.py +++ b/khayyam/jalali_datetime.py @@ -147,25 +147,6 @@ class JalaliDatetime(JalaliDate): return cls(**result) - # @classmethod - # def strptime(cls, date_string, frmt): - # """ - # Return a datetime corresponding to date_string, parsed according to format. This is equivalent to datetime(*(_time.strptime(date_string, format)[0:6])). ValueError is raised if the date_string and format can't be parsed by _time.strptime() or if it returns a value which isn't a _time tuple. See section strftime() and strptime() Behavior. - # '1387/4/12' - # '%Y/%m/%d' - # """ - # # TODO: Implement full features of python, see: http://docs.python.org/library/datetime.html - # valid_codes = {'%Y': (4, 'year'), - # '%m': (2, 'month'), - # '%d': (2, 'day'), - # '%H': (2, 'hour'), - # '%M': (2, 'minute'), - # '%S': (2, 'second'), - # '%f': (6, 'microsecond') - # } - # - # return parse(cls, date_string, frmt, valid_codes) - ######################## ### Instance Methods ### ######################## @@ -204,12 +185,6 @@ class JalaliDatetime(JalaliDate): self.second if second is None else second, self.microsecond if microsecond is None else microsecond, self.tzinfo if tzinfo is None else tzinfo) - # TODO: Test Case required - # if hour: result.hour = hour - # if minute: result.minute = minute - # if second: result.second = second - # if microsecond: result.microsecond = microsecond - # if tzinfo: result.tzinfo = tzinfo return result def astimezone(self, tz): diff --git a/khayyam/timezones.py b/khayyam/timezones.py index <HASH>..<HASH> 100644 --- a/khayyam/timezones.py +++ b/khayyam/timezones.py @@ -76,7 +76,6 @@ class Timezone(tzinfo): class TehranTimezone(Timezone): dst_start = (1, 1) dst_end = (7, 1) - # TODO: TEST Required def __init__(self): super(TehranTimezone, self).__init__( diff --git a/practice.py b/practice.py index <HASH>..<HASH> 100644 --- a/practice.py +++ b/practice.py @@ -3,8 +3,8 @@ import re __author__ = 'vahid' -for i in range(0, 14): - p = '^([0]?[1-9]|1[0-2])$' +for i in range(0, 61): + p = '^([0]?\d|[1-5]\d)$' try: if int(re.match(p, '%d' % i).group()) != i: print(i)
<I> -alpha ! some methods renamed, new formatter/parser engine fully tested.
pylover_khayyam
train
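The khayyam commit tightens loose \d{1,2} patterns into exact-range alternations and verifies them by brute force (the practice.py hunk does exactly that). A self-contained Python version of the same check:

import re

MINUTE_REGEX = r'([0]?\d|[1-5]\d)'     # 0-59, optional leading zero
HOUR24_REGEX = r'([0]?\d|1\d|2[0-3])'  # 0-23

def matches_exactly(pattern, lo, hi, probe_hi=100):
    """Check that the pattern accepts exactly the integers in [lo, hi]."""
    anchored = re.compile('^%s$' % pattern)
    accepted = {i for i in range(probe_hi) if anchored.match(str(i))}
    return accepted == set(range(lo, hi + 1))

print(matches_exactly(MINUTE_REGEX, 0, 59))  # True
print(matches_exactly(HOUR24_REGEX, 0, 23))  # True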
33be39fa004821b9cfebafa7e61eb57a18a7d520
diff --git a/lib/odf-report/table.rb b/lib/odf-report/table.rb index <HASH>..<HASH> 100644 --- a/lib/odf-report/table.rb +++ b/lib/odf-report/table.rb @@ -25,7 +25,7 @@ class Table @collection = get_collection_from_item(row, @collection_field) if row - if (@skip_if_empty || !@header) && @collection.empty? + if @skip_if_empty && @collection.empty? table.remove return end
if you said you want to skip, skip it
sandrods_odf-report
train
4cdd35d636570686f20d2babe458b4a74bf3f9fa
diff --git a/src/livestreamer_curses/main.py b/src/livestreamer_curses/main.py index <HASH>..<HASH> 100755 --- a/src/livestreamer_curses/main.py +++ b/src/livestreamer_curses/main.py @@ -40,6 +40,13 @@ from livestreamer import Livestreamer from multiprocessing.pool import ThreadPool as Pool from multiprocessing import Manager +PY3 = sys.version_info.major >= 3 + +def dictiter(d): + if PY3: + return d.items() + return d.iteritems() + try: from gdbm import error as GDBMError except: @@ -115,9 +122,9 @@ class ProcessList(object): def get_finished(self): """ Clean up terminated processes and returns the list of their ids """ indices = [] - for id, v in self.q.items(): + for idf, v in dictiter(self.q): if v.poll() != None: - indices.append(id) + indices.append(idf) for i in indices: self.q.pop(i) @@ -769,7 +776,7 @@ class StreamList(object): actual_res = s_res elif type(s_res) == dict: actual_res = DEFAULT_RESOLUTION_HARD - for (k,v) in s_res.iteritems(): + for k,v in dictiter(s_res): if k in url: actual_res = v break
python 2/3 compatible iterators (ugly as sh*t)
gapato_livestreamer-curses
train
d37546c074655b7042f01dae3cce4f0facab6bf2
diff --git a/glances/outputs/glances_bottle.py b/glances/outputs/glances_bottle.py index <HASH>..<HASH> 100644 --- a/glances/outputs/glances_bottle.py +++ b/glances/outputs/glances_bottle.py @@ -25,6 +25,7 @@ import sys import tempfile from io import open import webbrowser +from zlib import compress from glances.timer import Timer from glances.logger import logger @@ -36,8 +37,18 @@ except ImportError: sys.exit(2) -class GlancesBottle(object): +def gzip_compress(func): + """Compress result with Gzip algorithm if the client ask for it.""" + def wrapper(*args, **kwargs): + ret = func(*args, **kwargs) + if 'gzip' in request.headers.get('Accept-Encoding', ''): + response.headers['Content-Encoding'] = 'gzip' + ret = compress(ret.encode('utf-8')) + return ret + return wrapper + +class GlancesBottle(object): """This class manages the Bottle Web server.""" API_VERSION = '2' @@ -205,6 +216,7 @@ class GlancesBottle(object): # Return the static file return static_file(filepath, root=self.STATIC_PATH) + @gzip_compress def _api_help(self): """Glances API RESTFul implementation. @@ -220,8 +232,10 @@ class GlancesBottle(object): abort(404, "Cannot get help view data (%s)" % str(e)) return plist + @gzip_compress def _api_plugins(self): - """ + """Glances API RESTFul implementation. + @api {get} /api/%s/pluginslist Get plugins list @apiVersion 2.0 @apiName pluginslist @@ -256,6 +270,7 @@ class GlancesBottle(object): abort(404, "Cannot get plugin list (%s)" % str(e)) return plist + @gzip_compress def _api_all(self): """Glances API RESTFul implementation. @@ -282,8 +297,10 @@ class GlancesBottle(object): statval = json.dumps(self.stats.getAllAsDict()) except Exception as e: abort(404, "Cannot get stats (%s)" % str(e)) + return statval + @gzip_compress def _api_all_limits(self): """Glances API RESTFul implementation. @@ -301,6 +318,7 @@ class GlancesBottle(object): abort(404, "Cannot get limits (%s)" % (str(e))) return limits + @gzip_compress def _api_all_views(self): """Glances API RESTFul implementation. @@ -318,6 +336,7 @@ class GlancesBottle(object): abort(404, "Cannot get views (%s)" % (str(e))) return limits + @gzip_compress def _api(self, plugin): """Glances API RESTFul implementation. @@ -341,6 +360,7 @@ class GlancesBottle(object): abort(404, "Cannot get plugin %s (%s)" % (plugin, str(e))) return statval + @gzip_compress def _api_history(self, plugin, nb=0): """Glances API RESTFul implementation. @@ -365,6 +385,7 @@ class GlancesBottle(object): abort(404, "Cannot get plugin history %s (%s)" % (plugin, str(e))) return statval + @gzip_compress def _api_limits(self, plugin): """Glances API RESTFul implementation. @@ -388,6 +409,7 @@ class GlancesBottle(object): abort(404, "Cannot get limits for plugin %s (%s)" % (plugin, str(e))) return ret + @gzip_compress def _api_views(self, plugin): """Glances API RESTFul implementation. @@ -411,8 +433,9 @@ class GlancesBottle(object): abort(404, "Cannot get views for plugin %s (%s)" % (plugin, str(e))) return ret + @gzip_compress def _api_itemvalue(self, plugin, item, value=None, history=False, nb=0): - """Father method for _api_item and _api_value""" + """Father method for _api_item and _api_value.""" response.content_type = 'application/json' if plugin not in self.plugins_list: @@ -441,6 +464,7 @@ class GlancesBottle(object): return ret + @gzip_compress def _api_item(self, plugin, item): """Glances API RESTFul implementation. @@ -452,6 +476,7 @@ class GlancesBottle(object): """ return self._api_itemvalue(plugin, item) + @gzip_compress def _api_item_history(self, plugin, item, nb=0): """Glances API RESTFul implementation. @@ -463,6 +488,7 @@ """ return self._api_itemvalue(plugin, item, history=True, nb=int(nb)) + @gzip_compress def _api_value(self, plugin, item, value): """Glances API RESTFul implementation. @@ -473,6 +499,7 @@ """ return self._api_itemvalue(plugin, item, value) + @gzip_compress def _api_config(self): """Glances API RESTFul implementation. @@ -489,6 +516,7 @@ abort(404, "Cannot get config (%s)" % str(e)) return args_json + @gzip_compress def _api_config_item(self, item): """Glances API RESTFul implementation. @@ -510,6 +538,7 @@ abort(404, "Cannot get config item (%s)" % str(e)) return args_json + @gzip_compress def _api_args(self): """Glances API RESTFul implementation. @@ -528,6 +557,7 @@ abort(404, "Cannot get args (%s)" % str(e)) return args_json + @gzip_compress def _api_args_item(self, item): """Glances API RESTFul implementation.
Implement Gzip compression on the server side. Tests OK with curl, but the Web UI did not work anymore...
nicolargo_glances
train
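The commit wraps each API handler in a decorator that compresses the body when the client sends Accept-Encoding: gzip and sets the matching response header. A framework-free Python sketch follows; the header dicts stand in for Bottle's request/response objects. One hedge worth noting: the diff uses zlib.compress, which emits a zlib stream rather than true gzip framing, and that mismatch may be why the commit message reports the Web UI breaking; gzip.compress is used below instead:

import gzip
from functools import wraps

def gzip_compress(func):
    """Compress a handler's string result when the client accepts gzip."""
    @wraps(func)
    def wrapper(request_headers, response_headers, *args, **kwargs):
        ret = func(request_headers, response_headers, *args, **kwargs)
        if 'gzip' in request_headers.get('Accept-Encoding', ''):
            response_headers['Content-Encoding'] = 'gzip'
            # gzip framing here; zlib.compress (as in the diff) produces a
            # zlib stream, which browsers reject for this header
            ret = gzip.compress(ret.encode('utf-8'))
        return ret
    return wrapper

@gzip_compress
def api_all(request_headers, response_headers):
    return '{"cpu": 12.5}'

resp = {}
body = api_all({'Accept-Encoding': 'gzip, deflate'}, resp)
print(resp)  # {'Content-Encoding': 'gzip'}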
3b49f58ad7d91cfeaccc831abd06f173c4b828b8
diff --git a/lib/classes/caption/Field/Value.php b/lib/classes/caption/Field/Value.php index <HASH>..<HASH> 100644 --- a/lib/classes/caption/Field/Value.php +++ b/lib/classes/caption/Field/Value.php @@ -108,7 +108,7 @@ class caption_Field_Value implements cache_cacheableInterface $this->fetchVocabulary($vocabularyType, $vocabularyId); if ($this->vocabularyType) { - if (! $this->databox_field->getVocabularyControl()) { + if (!$this->databox_field->getVocabularyControl()) { // Vocabulary Control has been deactivated $this->removeVocabulary(); } elseif ($this->databox_field->getVocabularyControl()->getType() !== $this->vocabularyType->getType()) { @@ -293,7 +293,7 @@ class caption_Field_Value implements cache_cacheableInterface $connection = $databox_field->get_connection(); // Check consistency - if (! $databox_field->is_multi()) { + if (!$databox_field->is_multi()) { try { $field = $record->get_caption()->get_field($databox_field->get_name()); $values = $field->get_values(); @@ -301,7 +301,7 @@ class caption_Field_Value implements cache_cacheableInterface // Field was not found, so no values found either $values = []; } - if (! empty($values)) { + if (!empty($values)) { /** @var caption_Field_Value $caption_field_value */ $caption_field_value = reset($values); $caption_field_value->set_value($value); @@ -343,7 +343,7 @@ class caption_Field_Value implements cache_cacheableInterface $tbranch = $this->databox_field->get_tbranch(); - if (! $tbranch || ! $XPATH_thesaurus) { + if (!$tbranch || !$XPATH_thesaurus) { return $value; } @@ -384,7 +384,7 @@ class caption_Field_Value implements cache_cacheableInterface list($term, $context) = $this->splitTermAndContext(str_replace(["[[em]]", "[[/em]]"], ["", ""], $value)); // a value has been found in thesaurus, update value & set the query to bounce to the value $this->value = $bestnode->getAttribute('v'); - $this->qjs = $term . ($context ? '['.$context.']' : ''); + $this->qjs = $term . ($context ? '[' . $context . ']' : ''); $this->isThesaurusValue = true; } else { $this->isThesaurusValue = false;
Scrutinizer Auto-Fixes This commit consists of patches automatically generated for this project on <URL>
alchemy-fr_Phraseanet
train
26751ef6b999084a6f53069d0b091b2847c6a203
diff --git a/src/Oauth2/Authentication/Server.php b/src/Oauth2/Authentication/Server.php index <HASH>..<HASH> 100644 --- a/src/Oauth2/Authentication/Server.php +++ b/src/Oauth2/Authentication/Server.php @@ -339,16 +339,78 @@ maintenance of the server.', } /** - * Complete the authorisation code grant + * Issue an access token * * @access public + * * @param array $authParams Optional array of parsed $_POST keys + * * @return array Authorise request parameters */ - public function completeAuthCodeGrant($authParams = null) + public function issueAccessToken($authParams = null) { $params = array(); + // Grant type (must be 'authorization_code') + if ( ! isset($authParams['grant_type']) && + ! isset($_POST['grant_type'])) { + + throw new OAuthServerClientException(sprintf( + $this->errors['invalid_request'], 'grant_type'), 0); + + } else { + + $params['grant_type'] = (isset($authParams['grant_type'])) ? + $authParams['grant_type'] : $_POST['grant_type']; + + // Ensure response type is one that is recognised + if ( ! in_array($params['response_type'], + $this->config['grant_types'])) { + + throw new OAuthServerClientException( + $this->errors['unsupported_grant_type'], 7); + + } + } + + switch ($params['grant_type']) + { + // Authorization code grant + case 'authorization_code': + return $this->completeAuthCodeGrant($authParams, $params); + break; + + // Refresh token + case 'refresh_token': + + // Resource owner password credentials grant + case 'password': + + // Client credentials grant + case 'client_credentials': + + // Unsupported + default: + throw new OAuthServerException($this->errors['server_error'] . + 'Tried to process an unsuppported grant type.', + 5); + break; + } + } + + /** + * Complete the authorisation code grant + * + * @access private + * + * @param array $authParams Array of parsed $_POST keys + * @param array $params Generated parameters from issueAccessToken() + * + * @return array Authorise request parameters + */ + private function completeAuthCodeGrant($authParams = array(), $params = + array()) + { // Client ID if ( ! isset($authParams['client_id']) && ! isset($_POST['client_id'])) { @@ -402,27 +464,6 @@ maintenance of the server.', $this->errors['invalid_client'], 8); } - // Grant type (must be 'authorization_code') - if ( ! isset($authParams['grant_type']) && - ! isset($_POST['grant_type'])) { - - throw new OAuthServerClientException(sprintf( - $this->errors['invalid_request'], 'grant_type'), 0); - - } else { - - $params['grant_type'] = (isset($authParams['grant_type'])) ? - $authParams['grant_type'] : $_POST['grant_type']; - - // Ensure response type is one that is recognised - if ($params['response_type'] !== 'authorization_code') { - - throw new OAuthServerClientException( - $this->errors['unsupported_grant_type'], 7); - - } - } - // The authorization code if ( ! isset($authParams['code']) && ! isset($_GET['code'])) { @@ -477,6 +518,7 @@ maintenance of the server.', * @param string $redirectUri The redirect URI * @param array $params The parameters to be appended to the URL * @param string $query_delimeter The query string delimiter (default: ?) + * * @return string The updated redirect URI */ public function redirectUri($redirectUri, $params = array(),
Broke up the issueAccessToken method to support additional grant types
thephpleague_oauth2-server
train
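The refactored issueAccessToken switches on grant_type before delegating to a per-grant handler, though the committed check appears to read $params['response_type'] before anything assigns it — likely an oversight. That dispatch shape translates naturally to a lookup table; a hedged Python sketch (handler and table names are hypothetical):

```python
def complete_auth_code_grant(params: dict) -> dict:
    return {'grant': 'authorization_code', 'code': params['code']}

# One handler per supported grant type, mirroring the switch above.
GRANT_HANDLERS = {'authorization_code': complete_auth_code_grant}

def issue_access_token(params: dict) -> dict:
    grant_type = params.get('grant_type')
    if grant_type is None:
        raise ValueError('invalid_request: grant_type is missing')
    if grant_type not in GRANT_HANDLERS:
        raise ValueError('unsupported_grant_type: %s' % grant_type)
    return GRANT_HANDLERS[grant_type](params)

print(issue_access_token({'grant_type': 'authorization_code', 'code': 'xyz'}))
```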
e0c2ab084b2ccd7891c47eb116cab9deabee3176
diff --git a/spec/api_spec.rb b/spec/api_spec.rb index <HASH>..<HASH> 100644 --- a/spec/api_spec.rb +++ b/spec/api_spec.rb @@ -38,8 +38,9 @@ module Clickatell cmd.should_receive(:with_params).with(:param_one => 'foo', :session_id => '12345').and_return(uri) Net::HTTP.should_receive(:new).with('example.com', 80).and_return(transport=mock('http')) transport.should_receive(:use_ssl=).with(false) - transport.should_receive(:start).and_yield(yielded_transport=mock('http')) - yielded_transport.should_receive(:get).with('/foo/bar?a=b').and_return(raw_response=mock('http response')) + yielded_transport=mock('http') + yielded_transport.should_receive(:get).with('/foo/bar?a=b').and_return([raw_response=mock('http_response'), body = stub('body')]) + transport.should_receive(:start).and_yield(yielded_transport).and_return([raw_response, body]) executor.execute('cmdname', :param_one => 'foo').should == raw_response end @@ -50,8 +51,9 @@ module Clickatell cmd.should_receive(:with_params).with(:param_one => 'foo', :session_id => '12345').and_return(uri) Net::HTTP.should_receive(:new).with('example.com', 443).and_return(transport=mock('http')) transport.should_receive(:use_ssl=).with(true) - transport.should_receive(:start).and_yield(yielded_transport=mock('http')) - yielded_transport.should_receive(:get).with('/foo/bar?a=b').and_return(raw_response=mock('http response')) + yielded_transport=mock('http') + yielded_transport.should_receive(:get).with('/foo/bar?a=b').and_return([raw_response=mock('http_response'), body = stub('body')]) + transport.should_receive(:start).and_yield(yielded_transport).and_return([raw_response, body]) executor.execute('cmdname', :param_one => 'foo').should == raw_response end end
Recent RSpec changes must have changed the behaviour of and_yield because these specs suddenly stopped working.
lukeredpath_clickatell
train
dec09d6027b5472ff55c43dfca090e97c936df8f
diff --git a/venom/rpc/method.py b/venom/rpc/method.py index <HASH>..<HASH> 100644 --- a/venom/rpc/method.py +++ b/venom/rpc/method.py @@ -80,9 +80,8 @@ class Method(object): else: http_rule = self._http_rule - if service: - service_http_rule = '/' + service.__meta__.name.lower().replace('_', '-') - return service_http_rule + http_rule + if service is not None: + return service.__meta__.http_rule + http_rule return http_rule def http_path_params(self) -> Set[str]: diff --git a/venom/rpc/service.py b/venom/rpc/service.py index <HASH>..<HASH> 100644 --- a/venom/rpc/service.py +++ b/venom/rpc/service.py @@ -23,6 +23,12 @@ class ServiceManager(object): return name[:-len(postfix)] return name + @classmethod + def prepare_meta(cls, meta: MetaDict, meta_changes: MetaDict) -> MetaDict: + if not meta_changes.get('http_rule', None): + meta.http_rule = '/' + meta.name.lower().replace('_', '-') + return meta + def register_method(self, method: Method, name: str) -> Method: return method.register(self.service, method.name or name) @@ -33,11 +39,12 @@ class ServiceMeta(type): cls.__methods__ = methods = {} # TODO change to tuple, but still prevent multiple methods with same name. cls.__messages__ = messages = set() - cls.__meta__, meta_changes = meta(bases, members) + meta_, meta_changes = meta(bases, members) if not meta_changes.get('name', None): - cls.__meta__.name = cls.__meta__.manager.generate_service_name(name) + meta_.name = meta_changes.name = meta_.manager.generate_service_name(name) + cls.__meta__ = meta_.manager.prepare_meta(meta_, meta_changes) cls.__manager__ = manager = cls.__meta__.manager(cls, cls.__meta__, meta_changes) for n, m in inspect.getmembers(cls): @@ -85,3 +92,4 @@ class Service(object, metaclass=ServiceMeta): DateTimeConverter, DateConverter) stub = None + http_rule = None diff --git a/venom/util.py b/venom/util.py index <HASH>..<HASH> 100644 --- a/venom/util.py +++ b/venom/util.py @@ -17,6 +17,7 @@ def _meta_obj_to_dict(meta_obj): # FIXME should be AttributeDict[str, Any] MetaDict = AttributeDict + def meta(bases, members, meta_name='Meta') -> Tuple[MetaDict, MetaDict]: meta_ = AttributeDict() for base in bases: @@ -29,7 +30,7 @@ def meta(bases, members, meta_name='Meta') -> Tuple[MetaDict, MetaDict]: if meta_name in members: changes = _meta_obj_to_dict(members[meta_name]) meta_.update(changes) - return meta_, changes + return meta_, AttributeDict(changes) def upper_camelcase(s: str) -> str:
Add ServiceManager.prepare_meta() method and move service http_rule generation into there.
biosustain_venom
train
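prepare_meta() fills in a default http_rule derived from the service name when none was supplied. A small sketch of that derivation, with plain dicts standing in for venom's AttributeDict:

```python
def prepare_meta(meta: dict, meta_changes: dict) -> dict:
    # Default the HTTP rule to '/<service-name>', lowercased and with
    # underscores turned into dashes, unless the user set one explicitly.
    if not meta_changes.get('http_rule'):
        meta['http_rule'] = '/' + meta['name'].lower().replace('_', '-')
    return meta

assert prepare_meta({'name': 'User_Service'}, {})['http_rule'] == '/user-service'
```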
159a38a79a43398357c2c78d0bffc28b571e5291
diff --git a/src/FlipMove.js b/src/FlipMove.js index <HASH>..<HASH> 100644 --- a/src/FlipMove.js +++ b/src/FlipMove.js @@ -348,10 +348,10 @@ class FlipMove extends Component { const [elements, domNodes] = this.formatChildrenForHooks(); this.props.onFinishAll(elements, domNodes); - - // Reset our variables for the next iteration - this.childrenToAnimate = []; } + + // Reset our variables for the next iteration + this.childrenToAnimate = []; }); // If the placeholder was holding the container open while elements were
Always reset childrenToAnimate when triggering finish hooks
joshwcomeau_react-flip-move
train
31b5d8df7aed57eede4e4839e14c6e51097123fc
diff --git a/molo/commenting/tests/test_views.py b/molo/commenting/tests/test_views.py index <HASH>..<HASH> 100644 --- a/molo/commenting/tests/test_views.py +++ b/molo/commenting/tests/test_views.py @@ -7,12 +7,12 @@ from django.contrib.auth.models import User from django.contrib.contenttypes.models import ContentType from django.contrib.sites.models import Site from django.test import TestCase, Client, override_settings +from django.contrib.auth.models import Group from molo.commenting.models import MoloComment from molo.commenting.forms import MoloCommentForm -from molo.core.models import ArticlePage +from molo.core.models import ArticlePage, SiteLanguage from molo.core.tests.base import MoloTestCaseMixin - urlpatterns = patterns( '', url(r'^commenting/', @@ -265,3 +265,129 @@ class ViewMoreCommentsTest(TestCase, MoloTestCaseMixin): self.assertTrue('report' in crow.prettify()) self.assertTrue(reply.comment in replyrow.prettify()) self.assertFalse('report' in replyrow.prettify()) + + +class TestFrontEndCommentReplies(TestCase, MoloTestCaseMixin): + + def create_comment(self, article, comment, user, parent=None): + return MoloComment.objects.create( + content_type=ContentType.objects.get_for_model(article), + object_pk=article.pk, + content_object=article, + site=Site.objects.get_current(), + user=user, + comment=comment, + parent=parent, + submit_date=datetime.now()) + + def setUp(self): + self.mk_main() + self.english = SiteLanguage.objects.create(locale='en') + self.client = Client() + + self.superuser = User.objects.create_superuser( + username='superuser', + email='superuser@email.com', + password='password' + ) + + self.moderator_group, _created = Group.objects.get_or_create( + name='Moderator') + self.comment_moderator_group, _created = Group.objects.get_or_create( + name='Comment Moderator') + self.expert_group, _created = Group.objects.get_or_create( + name='Expert') + + self.moderator = User.objects.create_user( + username='moderator', + email='moderator@example.com', + password='password', + ) + self.moderator.groups.set([self.moderator_group]) + + self.comment_moderator = User.objects.create_user( + username='comment_moderator', + email='comment_moderator@example.com', + password='password', + ) + self.comment_moderator.groups.set([self.comment_moderator_group]) + + self.expert = User.objects.create_user( + username='expert', + email='expert@example.com', + password='password', + ) + self.expert.groups.set([self.expert_group]) + + # create ordinary user + self.bob = User.objects.create_user( + username='bob', + email='bob@example.com', + password='password', + ) + + self.section = self.mk_section( + self.section_index, title='section') + self.article = self.mk_article(self.section, title='article 1', + subtitle='article 1 subtitle', + slug='article-1') + self.comment = self.create_comment( + article=self.article, + comment="this_is_comment_content", + user=self.bob + ) + + def check_reply_exists(self, client): + response = client.get( + '/sections/{0}/{1}/'.format(self.section.slug, + self.article.slug) + ) + self.assertTrue(response.status_code, 200) + html = BeautifulSoup(response.content, 'html.parser') + [comment] = html.find_all(class_='comment-list__item') + self.assertTrue(comment.find('p', string='this_is_comment_content')) + self.assertTrue(comment.find('a', string='Reply')) + comment_reply_url = comment.find('a', string='Reply')['href'] + + # response = self.client.get(comment_reply_url) + # self.assertTrue(response.status_code, 200) + + def test_expert_can_reply_to_comments_on_front_end(self): + client = Client() + client.login( + username=self.expert.username, password='password') + self.check_reply_exists(client) + + self.check_reply_exists(client) + + def test_moderator_can_reply_to_comments_on_front_end(self): + client = Client() + client.login( + username=self.moderator.username, password='password') + self.check_reply_exists(client) + + self.check_reply_exists(client) + + def test_comment_moderator_can_reply_to_comments_on_front_end(self): + client = Client() + client.login( + username=self.comment_moderator.username, password='password') + self.check_reply_exists(client) + + self.check_reply_exists(client) + + def test_superuser_can_reply_to_comments_on_front_end(self): + client = Client() + client.login( + username=self.superuser.username, password='password') + self.check_reply_exists(client) + + self.check_reply_exists(client) + + def test_ordinary_user_can_reply_to_comments_on_front_end(self): + client = Client() + client.login( + username=self.bob.username, password='password') + self.check_reply_exists(client) + + self.check_reply_exists(client)
Create failing tests for replying on the front end Only an expert should currently be allowed to reply to comments
praekeltfoundation_molo.commenting
train
7706cd9bbbe0f95e950695754d4081638f9fb0be
diff --git a/gcloud/storage/bucket.py b/gcloud/storage/bucket.py index <HASH>..<HASH> 100644 --- a/gcloud/storage/bucket.py +++ b/gcloud/storage/bucket.py @@ -225,13 +225,7 @@ class Bucket(_PropertyMixin): if isinstance(key, Key): return key - # Support Python 2 and 3. - try: - string_type = six.string_types - except NameError: # pragma: NO COVER PY3k - string_type = str - - if isinstance(key, string_type): + if isinstance(key, six.string_types): return Key(bucket=self, name=key) raise TypeError('Invalid key: %s' % key)
Moar no need for 'try: ... except:' when we have 'six'.
googleapis_google-cloud-python
train
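six.string_types is already (str,) on Python 3 and (str, unicode) on Python 2, so the try/except NameError probe for basestring is redundant — one isinstance() check suffices. A tiny sketch (requires the six package; the helper name is illustrative):

```python
import six

def new_key(name):
    """Accept only string-like key names, on both Python 2 and 3."""
    if isinstance(name, six.string_types):
        return {'name': name}
    raise TypeError('Invalid key: %r' % (name,))

print(new_key('my-blob'))
```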
e0ed3c294fe7dc862f62bb086602150885642047
diff --git a/bcbio/pipeline/main.py b/bcbio/pipeline/main.py index <HASH>..<HASH> 100644 --- a/bcbio/pipeline/main.py +++ b/bcbio/pipeline/main.py @@ -394,6 +394,8 @@ class RnaseqPipeline(AbstractPipeline): multiplier=alignprep.parallel_multiplier(samples)) as run_parallel: samples = disambiguate.split(samples) samples = run_parallel("process_alignment", samples) + with prun.start(_wres(parallel, ["samtools"]), samples, + config, dirs, "disambiguate") as run_parallel: samples = disambiguate.resolve(samples, run_parallel) with prun.start(_wres(parallel, ["samtools", "cufflinks"]),
RNA-seq disambiguation parallelisation improvements
bcbio_bcbio-nextgen
train
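The change runs disambiguate.resolve inside its own prun.start block so that step only reserves samtools workers instead of inheriting the larger alignment pool. A hedged sketch of the pattern, with a hypothetical stand-in for bcbio's prun.start:

```python
from contextlib import contextmanager

@contextmanager
def start(programs):
    # Stand-in for prun.start: acquire a worker pool sized for
    # `programs`, hand back a runner, and release the pool on exit.
    print('acquiring workers for', programs)
    try:
        yield lambda fn, items: [fn(x) for x in items]
    finally:
        print('releasing workers')

with start(['samtools', 'bwa']) as run_parallel:
    samples = run_parallel(str.upper, ['s1', 's2'])
with start(['samtools']) as run_parallel:  # narrower pool for one step
    samples = run_parallel(lambda s: s + '-resolved', samples)
print(samples)
```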
291a1fd048bad23b13619322bffd7b696c2bd507
diff --git a/stockfishpy/stockfishpy.py b/stockfishpy/stockfishpy.py index <HASH>..<HASH> 100644 --- a/stockfishpy/stockfishpy.py +++ b/stockfishpy/stockfishpy.py @@ -121,8 +121,8 @@ class Engine(subprocess.Popen): self.send('position startpos moves {}'.format( self.__listtostring(position))) self.isready() - elif re.match('\s*^(((?:[rnbqkpRNBQKP1-8]+\/){7})[rnbqkpRNBQKP1-8]+)\s([b|w])\s([K|Q|k|q]{1,4})\s(-|[a-h][1-8])\s(\d+\s\d+)$', position): - regexList = re.match('\s*^(((?:[rnbqkpRNBQKP1-8]+\/){7})[rnbqkpRNBQKP1-8]+)\s([b|w])\s([K|Q|k|q]{1,4})\s(-|[a-h][1-8])\s(\d+\s\d+)$', position).groups() + elif re.match('\s*^(((?:[rnbqkpRNBQKP1-8]+\/){7})[rnbqkpRNBQKP1-8]+)\s([b|w])\s([K|Q|k|q|-]{1,4})\s(-|[a-h][1-8])\s(\d+\s\d+)$', position): + regexList = re.match('\s*^(((?:[rnbqkpRNBQKP1-8]+\/){7})[rnbqkpRNBQKP1-8]+)\s([b|w])\s([K|Q|k|q|-]{1,4})\s(-|[a-h][1-8])\s(\d+\s\d+)$', position).groups() fen = regexList[0].split("/") if len(fen) != 8: raise ValueError("expected 8 rows in position part of fen: {0}".format(repr(fen)))
fix FEN passing function; refers to the issue reported by Juliano Polito
Dani4kor_stockfishpy
train
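The fix adds '-' to the castling character class so FENs without castling rights parse. A quick check against the pattern as fixed in the diff:

```python
import re

FEN_RE = re.compile(
    r'\s*^(((?:[rnbqkpRNBQKP1-8]+\/){7})[rnbqkpRNBQKP1-8]+)'
    r'\s([b|w])\s([K|Q|k|q|-]{1,4})\s(-|[a-h][1-8])\s(\d+\s\d+)$'
)

start_pos = 'rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq - 0 1'
no_castle = 'rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w - - 0 1'
assert FEN_RE.match(start_pos)
assert FEN_RE.match(no_castle)  # '-' castling field now accepted
```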
ba601d0f66874e0d7323ef906bee1aa2e664ad6e
diff --git a/spec/integration/parser/compiler_spec.rb b/spec/integration/parser/compiler_spec.rb index <HASH>..<HASH> 100644 --- a/spec/integration/parser/compiler_spec.rb +++ b/spec/integration/parser/compiler_spec.rb @@ -113,13 +113,10 @@ describe Puppet::Parser::Compiler do end end - # COPY OF UNIT TEST (HAS ONE NEW TEST EMBEDDED) - # describe "when evaluating node classes" do include PuppetSpec::Compiler describe "when provided classes in hash format" do - # NEW INTEGRATION TEST it 'looks up default parameter values from inherited class (PUP-2532)' do catalog = compile_to_catalog(<<-CODE) class a { @@ -353,7 +350,7 @@ describe Puppet::Parser::Compiler do describe 'when resolving class references' do include Matchers::Resource - ## BEFORE + describe 'and classname is a Resource Reference and strict == :error' do before(:each) do Puppet[:strict] = :error
(maint) Remove comments that seem to be from W.I.P.
puppetlabs_puppet
train
ccfa8f7ef8384cf03aef84c67caaddb19eb117ed
diff --git a/image.go b/image.go index <HASH>..<HASH> 100644 --- a/image.go +++ b/image.go @@ -183,7 +183,7 @@ func (i *Image) DrawImage(img *Image, options *DrawImageOptions) error { return nil } - w, h := img.restorable.Size() + w, h := img.Size() sx0, sy0, sx1, sy1 := 0, 0, w, h if r := options.SourceRect; r != nil { sx0 = r.Min.X @@ -214,7 +214,7 @@ func (i *Image) DrawImage(img *Image, options *DrawImageOptions) error { // Bounds returns the bounds of the image. func (i *Image) Bounds() image.Rectangle { - w, h := i.restorable.Size() + w, h := i.Size() return image.Rect(0, 0, w, h) } @@ -276,7 +276,7 @@ func (i *Image) ReplacePixels(p []byte) error { if i.restorable == nil { return nil } - w, h := i.restorable.Size() + w, h := i.Size() if l := 4 * w * h; len(p) != l { panic(fmt.Sprintf("ebiten: len(p) was %d but must be %d", len(p), l)) }
graphics: Refactoring: call (*Image).Size() function as much as possible
hajimehoshi_ebiten
train
23938664bfec51e48e60ab6f712c699a6cd75477
diff --git a/test/Parser/Banking/Mt940/Engine/Rabo/ParseTest.php b/test/Parser/Banking/Mt940/Engine/Rabo/ParseTest.php index <HASH>..<HASH> 100644 --- a/test/Parser/Banking/Mt940/Engine/Rabo/ParseTest.php +++ b/test/Parser/Banking/Mt940/Engine/Rabo/ParseTest.php @@ -30,6 +30,13 @@ class ParseTest extends \PHPUnit_Framework_TestCase $this->assertEquals('Rabo', $method->invoke($this->engine)); } + public function testParsesAllFoundStatements() { + $statements = $this->engine->parse(); + $this->assertEquals(39, count($statements)); + $this->assertEquals('06-01-2003', reset($statements)->getTimestamp('d-m-Y')); + $this->assertEquals('08-01-2003', end($statements)->getTimestamp('d-m-Y')); + } + public function testInitialNegativeStatementBalance() { $this->engine->loadString(file_get_contents(__DIR__ . '/sample2')); $statements = $this->engine->parse();
updated statement test: now it also checks the soon-to-be-deprecated dates
fruitl00p_php-mt940
train
002dee9b3e26803fc3b1f1f6c9830987212e9a0e
diff --git a/api/tasks.go b/api/tasks.go index <HASH>..<HASH> 100644 --- a/api/tasks.go +++ b/api/tasks.go @@ -76,8 +76,8 @@ func (g *TaskGroup) AddTask(t *Task) *TaskGroup { // LogConfig provides configuration for log rotation type LogConfig struct { - MaxFiles int `mapstructure:"max_files"` - MaxFileSizeMB int `mapstructure:"max_file_size"` + MaxFiles int + MaxFileSizeMB int } // Task is a single process in a task group. diff --git a/client/driver/docker.go b/client/driver/docker.go index <HASH>..<HASH> 100644 --- a/client/driver/docker.go +++ b/client/driver/docker.go @@ -620,7 +620,6 @@ func (d *DockerDriver) Open(ctx *ExecContext, handleID string) (DriverHandle, er Reattach: pid.PluginConfig.PluginConfig(), } - logCollector, pluginClient, err := createLogCollector(pluginConfig, d.config.LogOutput, d.config) client, err := d.dockerClient() if err != nil { return nil, fmt.Errorf("Failed to connect to docker daemon: %s", err) @@ -645,6 +644,14 @@ func (d *DockerDriver) Open(ctx *ExecContext, handleID string) (DriverHandle, er if !found { return nil, fmt.Errorf("Failed to find container %s: %v", pid.ContainerID, err) } + logCollector, pluginClient, err := createLogCollector(pluginConfig, d.config.LogOutput, d.config) + if err != nil { + d.logger.Printf("[INFO] driver.docker: couldn't re-attach to the plugin process: %v", err) + if e := client.StopContainer(pid.ContainerID, uint(pid.KillTimeout*time.Second)); e != nil { + d.logger.Printf("[DEBUG] driver.docker: couldn't stop container: %v", e) + } + return nil, err + } // Return a driver handle h := &DockerHandle{ diff --git a/client/driver/syslog/collector.go b/client/driver/syslog/collector.go index <HASH>..<HASH> 100644 --- a/client/driver/syslog/collector.go +++ b/client/driver/syslog/collector.go @@ -78,7 +78,7 @@ func (s *SyslogCollector) LaunchCollector(ctx *LogCollectorContext) (*SyslogColl if err != nil { return nil, err } - s.logger.Printf("sylog-server: launching syslog server on addr: %v", addr) + s.logger.Printf("[DEBUG] sylog-server: launching syslog server on addr: %v", addr) s.ctx = ctx // configuring the task dir if err := s.configureTaskDir(); err != nil { @@ -117,6 +117,8 @@ func (s *SyslogCollector) LaunchCollector(ctx *LogCollectorContext) (*SyslogColl go func(channel syslog.LogPartsChannel) { for logParts := range channel { + // If the severity of the log line is err then we write to stderr + // otherwise all messages go to stdout s := logParts["severity"].(s1.Priority) if s == s1.LOG_ERR { we.Write(logParts["content"].([]byte)) diff --git a/client/driver/syslog/parser.go b/client/driver/syslog/parser.go index <HASH>..<HASH> 100644 --- a/client/driver/syslog/parser.go +++ b/client/driver/syslog/parser.go @@ -29,9 +29,9 @@ const ( // Priority holds all the priority bits in a syslog log line type Priority struct { - P syslog.Priority - F syslog.Priority - S syslog.Priority + Pri int + Facility syslog.Priority + Severity syslog.Priority } // DockerLogParser parses a line of log message that the docker daemon ships @@ -136,9 +136,9 @@ func (d *DockerLogParser) newPriority(p int) Priority { // The Priority value is calculated by first multiplying the Facility // number by 8 and then adding the numerical value of the Severity. return Priority{ - P: syslog.Priority(p), - F: syslog.Priority(p / 8), - S: syslog.Priority(p % 8), + Pri: p, + Facility: syslog.Priority(p / 8), + Severity: syslog.Priority(p % 8), } }
Handling errors when client can't re-attach to syslog collector
hashicorp_nomad
train
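The renamed Priority fields make the RFC 5424 arithmetic explicit: PRI = facility * 8 + severity, so decoding is a single divmod. In Python:

```python
def split_pri(pri: int):
    """Decode a syslog PRI value into (facility, severity) per RFC 5424."""
    facility, severity = divmod(pri, 8)
    return facility, severity

assert split_pri(165) == (20, 5)  # local4.notice
assert split_pri(3) == (0, 3)     # kern.err
```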
e7578083ff6ddbdb2d25f1a7ca60964484260a07
diff --git a/lib/actions/user.js b/lib/actions/user.js index <HASH>..<HASH> 100644 --- a/lib/actions/user.js +++ b/lib/actions/user.js @@ -133,7 +133,7 @@ function setUser(user, fetchTrips, intl) { } const { accessibilityRoutingByDefault } = user - if (accessibilityRoutingByDefault) { + if (accessibilityRoutingByDefault !== undefined) { dispatch(setQueryParam({ wheelchair: accessibilityRoutingByDefault })) } }
refactor(actions/user): address pr feedback
opentripplanner_otp-react-redux
train
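The fix replaces a truthiness test with an explicit undefined check so that a stored false still reaches setQueryParam. The same pitfall exists in Python with None versus falsy values; a sketch with an illustrative function name:

```python
def query_params(accessibility_by_default):
    # `is not None` keeps an explicit False, which a bare truthiness
    # test (`if accessibility_by_default:`) would silently drop.
    if accessibility_by_default is not None:
        return {'wheelchair': accessibility_by_default}
    return {}

assert query_params(False) == {'wheelchair': False}
assert query_params(None) == {}
```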
95ee3e18a0b5249efdd8eaf7436c2a99521744a7
diff --git a/lib/model/adapters/mongo.js b/lib/model/adapters/mongo.js index <HASH>..<HASH> 100644 --- a/lib/model/adapters/mongo.js +++ b/lib/model/adapters/mongo.js @@ -70,6 +70,8 @@ var Mongo = function (config) { // if there's a doc, create a model out of it if (doc) { + // TODO: figure out why this uses native _ids + delete doc._id; instance = geddy.model[self.model].create(doc); }
Don't use mongo's native ID property (2)
mde_ejs
train
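Deleting doc._id before building the model keeps the datastore's native identifier from leaking into the ORM object. A dict-level sketch of the same guard (helper name is hypothetical):

```python
def strip_native_id(doc: dict) -> dict:
    """Return a copy of the document without the datastore's _id field."""
    cleaned = dict(doc)
    cleaned.pop('_id', None)
    return cleaned

assert strip_native_id({'_id': 'abc123', 'title': 'post'}) == {'title': 'post'}
```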
153571d2508a52ae258db108fba833e5816c0e9a
diff --git a/lib/vaulted_billing/gateways/authorize_net_cim.rb b/lib/vaulted_billing/gateways/authorize_net_cim.rb index <HASH>..<HASH> 100644 --- a/lib/vaulted_billing/gateways/authorize_net_cim.rb +++ b/lib/vaulted_billing/gateways/authorize_net_cim.rb @@ -154,7 +154,7 @@ module VaultedBilling end def after_post(response) - VaultedBilling.logger.debug { "Response code %s (HTTP %d), %s" % [response.message, response.code, response.body.inspect] } if VaultedBilling.logger? + VaultedBilling.logger.info { "Response code %s (HTTP %d), %s" % [response.message, response.code, response.body.inspect] } if VaultedBilling.logger? response.body = Hash.from_xml(response.body) response.success = response.body[response.body.keys.first]['messages']['resultCode'] == 'Ok' end diff --git a/lib/vaulted_billing/gateways/nmi_customer_vault.rb b/lib/vaulted_billing/gateways/nmi_customer_vault.rb index <HASH>..<HASH> 100644 --- a/lib/vaulted_billing/gateways/nmi_customer_vault.rb +++ b/lib/vaulted_billing/gateways/nmi_customer_vault.rb @@ -116,7 +116,7 @@ module VaultedBilling end def after_post(response) - VaultedBilling.logger.debug { "Response code %s (HTTP %d), %s" % [response.message, response.code, response.body.inspect] } if VaultedBilling.logger? + VaultedBilling.logger.info { "Response code %s (HTTP %d), %s" % [response.message, response.code, response.body.inspect] } if VaultedBilling.logger? response.body = Hash.from_querystring(response.body) response.success = response.body['response'] == '1' end
Upgrade response logging to INFO from DEBUG
envylabs_vaulted_billing
train
60ae0019b109e73e6e48960c6bd161bd30679bc4
diff --git a/lib/minimart/inventory_cookbook/base_cookbook.rb b/lib/minimart/inventory_cookbook/base_cookbook.rb index <HASH>..<HASH> 100644 --- a/lib/minimart/inventory_cookbook/base_cookbook.rb +++ b/lib/minimart/inventory_cookbook/base_cookbook.rb @@ -14,6 +14,10 @@ module Minimart false end + def install + raise 'not implemented' + end + end end end diff --git a/lib/minimart/inventory_cookbook/git_cookbook.rb b/lib/minimart/inventory_cookbook/git_cookbook.rb index <HASH>..<HASH> 100644 --- a/lib/minimart/inventory_cookbook/git_cookbook.rb +++ b/lib/minimart/inventory_cookbook/git_cookbook.rb @@ -3,6 +3,7 @@ module Minimart class GitCookbook < BaseCookbook attr_reader :name, + :url, :branch, :ref, :tag @@ -13,12 +14,46 @@ module Minimart @branch = opts[:branch] @ref = opts[:ref] @tag = opts[:tag] + @url = opts[:url] end def location_specification? true end + def install(output_directory) + git_repo.checkout version + git_repo.reset_hard version + new_directory = File.join(output_directory, "/#{name}-#{version}") + Utils::FileHelper.copy_directory(tmp_path, new_directory) + Utils::FileHelper.remove_directory(File.join(new_directory, '/.git')) + + metadata = Ridley::Chef::Cookbook.from_path(new_directory).metadata + + @cookbook = Minimart::Mirror::RemoteCookbook.new( + name: metadata.name, + version: metadata.version, + dependencies: metadata.dependencies) + + @version_requirement = @cookbook.version + + return @cookbook + end + + private + + def version + ref || branch || tag + end + + def git_repo + @git_repo ||= Git.clone(url, tmp_path) + end + + def tmp_path + @tmp_path ||= Utils::FileHelper.make_temporary_directory + end + end end end diff --git a/lib/minimart/mirror/cookbook_downloader.rb b/lib/minimart/mirror/cookbook_downloader.rb index <HASH>..<HASH> 100644 --- a/lib/minimart/mirror/cookbook_downloader.rb +++ b/lib/minimart/mirror/cookbook_downloader.rb @@ -6,8 +6,11 @@ module Minimart ## # TODO support all Berkshelf API sources ## - archive_file = Utils::Http.get_binary("#{cookbook.name}-#{cookbook.version}", cookbook.download_url) - Utils::Archive.extract_cookbook(archive_file, destination) + Configuration.output.puts "-- Downloading #{cookbook.name} #{cookbook.version}" + + directory = "#{cookbook.name}-#{cookbook.version}" + archive_file = Utils::Http.get_binary(directory, cookbook.download_url) + Utils::Archive.extract_cookbook(archive_file, File.join(destination, directory)) end end diff --git a/lib/minimart/mirror/inventory_builder.rb b/lib/minimart/mirror/inventory_builder.rb index <HASH>..<HASH> 100644 --- a/lib/minimart/mirror/inventory_builder.rb +++ b/lib/minimart/mirror/inventory_builder.rb @@ -16,6 +16,7 @@ module Minimart def build! make_inventory_directory + download_cookbooks_with_location_specifications build_dependency_graph fetch_inventory end @@ -26,6 +27,14 @@ module Minimart Utils::FileHelper.make_directory(inventory_directory) end + def download_cookbooks_with_location_specifications + inventory_cookbooks.each do |inventory_cookbook| + next unless inventory_cookbook.location_specification? + cookbook = inventory_cookbook.install(inventory_directory) + dependency_graph.add_remote_cookbook(cookbook) + end + end + def build_dependency_graph add_cookbooks_to_dependency_graph add_requirements_to_dependency_graph @@ -48,11 +57,9 @@ module Minimart def fetch_inventory dependency_graph.resolved_requirements.each do |resolved_requirement| name, version = resolved_requirement - Configuration.output.puts "-- Downloading #{name} #{version}" - remote_cookbook = find_remote_cookbook(name, version) - destination = File.join(inventory_directory, "#{name}-#{version}") - CookbookDownloader.download(remote_cookbook, destination) + next if remote_cookbook.nil? + CookbookDownloader.download(remote_cookbook, inventory_directory) end end @@ -61,6 +68,8 @@ module Minimart result = source.find_cookbook(name, version) return result unless result.nil? end + + return nil end end diff --git a/lib/minimart/mirror/inventory_configuration.rb b/lib/minimart/mirror/inventory_configuration.rb index <HASH>..<HASH> 100644 --- a/lib/minimart/mirror/inventory_configuration.rb +++ b/lib/minimart/mirror/inventory_configuration.rb @@ -41,7 +41,11 @@ module Minimart end def build_cookbooks_from_git_location(name, requirements) - [] + requirements[:branches].map do |branch| + InventoryCookbook::GitCookbook.new(name, + url: requirements[:url], + branch: branch) + end end def build_cookbooks_from_supermarket_location(name, requirements)
Adding basic support for git cookbooks
electric-it_minimart
train
3a08114fe5e905b74147d7dc8debc2bf966357b8
diff --git a/mutagen/mp3.py b/mutagen/mp3.py index <HASH>..<HASH> 100644 --- a/mutagen/mp3.py +++ b/mutagen/mp3.py @@ -1,5 +1,6 @@ -# MP3 stream header information support for Mutagen. -# Copyright 2006 Joe Wreschnig +# -*- coding: utf-8 -*- + +# Copyright (C) 2006 Joe Wreschnig # # This program is free software; you can redistribute it and/or modify # it under the terms of version 2 of the GNU General Public License as @@ -143,9 +144,9 @@ class MPEGInfo(StreamInfo): data = fileobj.read(32768) frame_1 = data.find(b"\xff") - while 0 <= frame_1 <= len(data) - 4: + while 0 <= frame_1 <= (len(data) - 4): frame_data = struct.unpack(">I", data[frame_1:frame_1 + 4])[0] - if (frame_data >> 16) & 0xE0 != 0xE0: + if ((frame_data >> 16) & 0xE0) != 0xE0: frame_1 = data.find(b"\xff", frame_1 + 2) else: version = (frame_data >> 19) & 0x3 @@ -181,13 +182,13 @@ class MPEGInfo(StreamInfo): if self.layer == 1: frame_length = ( - 12 * self.bitrate // self.sample_rate + padding) * 4 + (12 * self.bitrate // self.sample_rate) + padding) * 4 frame_size = 384 elif self.version >= 2 and self.layer == 3: - frame_length = 72 * self.bitrate // self.sample_rate + padding + frame_length = (72 * self.bitrate // self.sample_rate) + padding frame_size = 576 else: - frame_length = 144 * self.bitrate // self.sample_rate + padding + frame_length = (144 * self.bitrate // self.sample_rate) + padding frame_size = 1152 if check_second: @@ -199,7 +200,7 @@ class MPEGInfo(StreamInfo): ">H", data[possible:possible + 2])[0] except struct.error: raise HeaderNotFoundError("can't sync to second MPEG frame") - if frame_data & 0xFFE0 != 0xFFE0: + if (frame_data & 0xFFE0) != 0xFFE0: raise HeaderNotFoundError("can't sync to second MPEG frame") self.length = 8 * real_size / float(self.bitrate)
mp3.py: consistent headers/pep<I>, added parentheses for clarity.
quodlibet_mutagen
train
89440b10718584835397595c693d2201ad35b4e8
diff --git a/spec/analyser/statement_spec.rb b/spec/analyser/statement_spec.rb index <HASH>..<HASH> 100644 --- a/spec/analyser/statement_spec.rb +++ b/spec/analyser/statement_spec.rb @@ -10,7 +10,7 @@ module DeepCover let(:by_execution) do results .sort_by{|range, _runs| range.begin_pos } - .group_by{|_range, runs| runs != 0 } + .group_by{|_range, runs| runs && runs != 0 } .transform_values{|ranges_run_pairs| ranges_run_pairs.map(&:first)} end let(:lines_by_execution) { by_execution.transform_values{|ranges| ranges.map(&:line)} }
Differentiate nil runs from > 0
deep-cover_deep-cover
train
7887f88ced1af599ef919709c7527736d0224ea3
diff --git a/ask-sdk-dynamodb-persistence-adapter/setup.py b/ask-sdk-dynamodb-persistence-adapter/setup.py index <HASH>..<HASH> 100644 --- a/ask-sdk-dynamodb-persistence-adapter/setup.py +++ b/ask-sdk-dynamodb-persistence-adapter/setup.py @@ -61,7 +61,7 @@ setup( 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.6'. + 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7' ), python_requires=(">2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, "
Add PyPI trove classifier for Python <I> to setup.py
alexa_alexa-skills-kit-sdk-for-python
train
acd1e785bc2e1a32da377dd47b0b84912450817c
diff --git a/harpoon/imager.py b/harpoon/imager.py index <HASH>..<HASH> 100644 --- a/harpoon/imager.py +++ b/harpoon/imager.py @@ -210,7 +210,7 @@ class Image(object): ports = self.figure_out_ports(extra_ports) tty = not detach and self.interactive - links = [link.split(":") for link in self.link] + links = [(link.split(":") if ":" in link else (link, link)) for link in self.link] volumes = self.volumes if extra_volumes: if volumes is None:
Can specify links as just the container name
delfick_harpoon
train
bc9f7eacb94e05ec089ee7a2d130a2e8a9e54c64
diff --git a/ec2/spark_ec2.py b/ec2/spark_ec2.py index <HASH>..<HASH> 100755 --- a/ec2/spark_ec2.py +++ b/ec2/spark_ec2.py @@ -101,6 +101,8 @@ def parse_args(): help="The SSH user you want to connect as (default: root)") parser.add_option("--delete-groups", action="store_true", default=False, help="When destroying a cluster, delete the security groups that were created") + parser.add_option("--use-existing-master", action="store_true", default=False, + help="Launch fresh slaves, but use an existing stopped master if possible") (opts, args) = parser.parse_args() if len(args) != 2: @@ -233,9 +235,9 @@ def launch_cluster(conn, opts, cluster_name): slave_group.authorize('tcp', 60075, 60075, '0.0.0.0/0') # Check if instances are already running in our groups - active_nodes = get_existing_cluster(conn, opts, cluster_name, - die_on_error=False) - if any(active_nodes): + existing_masters, existing_slaves = get_existing_cluster(conn, opts, cluster_name, + die_on_error=False) + if existing_slaves or (existing_masters and not opts.use_existing_master): print >> stderr, ("ERROR: There are already instances running in " + "group %s or %s" % (master_group.name, slave_group.name)) sys.exit(1) @@ -336,21 +338,28 @@ def launch_cluster(conn, opts, cluster_name): zone, slave_res.id) i += 1 - # Launch masters - master_type = opts.master_instance_type - if master_type == "": - master_type = opts.instance_type - if opts.zone == 'all': - opts.zone = random.choice(conn.get_all_zones()).name - master_res = image.run(key_name = opts.key_pair, - security_groups = [master_group], - instance_type = master_type, - placement = opts.zone, - min_count = 1, - max_count = 1, - block_device_map = block_map) - master_nodes = master_res.instances - print "Launched master in %s, regid = %s" % (zone, master_res.id) + # Launch or resume masters + if existing_masters: + print "Starting master..." + for inst in existing_masters: + if inst.state not in ["shutting-down", "terminated"]: + inst.start() + master_nodes = existing_masters + else: + master_type = opts.master_instance_type + if master_type == "": + master_type = opts.instance_type + if opts.zone == 'all': + opts.zone = random.choice(conn.get_all_zones()).name + master_res = image.run(key_name = opts.key_pair, + security_groups = [master_group], + instance_type = master_type, + placement = opts.zone, + min_count = 1, + max_count = 1, + block_device_map = block_map) + master_nodes = master_res.instances + print "Launched master in %s, regid = %s" % (zone, master_res.id) # Return all the instances return (master_nodes, slave_nodes) @@ -732,6 +741,7 @@ def real_main(): cluster_name + "?\nDATA ON EPHEMERAL DISKS WILL BE LOST, " + "BUT THE CLUSTER WILL KEEP USING SPACE ON\n" + "AMAZON EBS IF IT IS EBS-BACKED!!\n" + + "All data on spot-instance slaves will be lost.\n" + "Stop cluster " + cluster_name + " (y/N): ") if response == "y": (master_nodes, slave_nodes) = get_existing_cluster( @@ -743,7 +753,10 @@ def real_main(): print "Stopping slaves..." for inst in slave_nodes: if inst.state not in ["shutting-down", "terminated"]: - inst.stop() + if inst.spot_instance_request_id: + inst.terminate() + else: + inst.stop() elif action == "start": (master_nodes, slave_nodes) = get_existing_cluster(conn, opts, cluster_name)
Enable stopping and starting a spot cluster
apache_spark
train
afecd90a6ca1551057631d21d940af86385ac535
diff --git a/modules/caddyhttp/reverseproxy/caddyfile.go b/modules/caddyhttp/reverseproxy/caddyfile.go index <HASH>..<HASH> 100644 --- a/modules/caddyhttp/reverseproxy/caddyfile.go +++ b/modules/caddyhttp/reverseproxy/caddyfile.go @@ -670,6 +670,16 @@ func (h *HTTPTransport) UnmarshalCaddyfile(d *caddyfile.Dispenser) error { h.TLS.RootCAPEMFiles = args + case "tls_server_name": + if !d.NextArg() { + return d.ArgErr() + } + if h.TLS == nil { + h.TLS = new(TLSConfig) + } + + h.TLS.ServerName = d.Val() + case "keepalive": if !d.NextArg() { return d.ArgErr()
reverseproxy: Add tls_server_name option to Caddyfile (#<I>)
mholt_caddy
train
5a35359f51089b747e40f26ad11bd21a149e9d10
diff --git a/lib/couchrest/helper/file_manager.rb b/lib/couchrest/helper/file_manager.rb index <HASH>..<HASH> 100644 --- a/lib/couchrest/helper/file_manager.rb +++ b/lib/couchrest/helper/file_manager.rb @@ -175,22 +175,9 @@ module CouchRest libs << open(global_lib).read libs << "\n" end - Dir["#{forms_dir}/#{folder}/lib.#{design_lang}"].collect do |global_lib| - libs << open(global_lib).read - libs << "\n" - end - designs[folder]["forms"]["#{form_name}"] = read(design_doc, libs) - end + designs[folder]["forms"]["#{form_name}"] = read(design_doc, libs) end - # # cleanup empty maps and reduces - # designs.each do |name, props| - # props["forms"].each do |view, funcs| - # next unless view.include?("reduce") - # props["forms"].delete(view) unless funcs.keys.include?("reduce") - # end - # end - designs.each do |k,v| create_or_update("_design/#{k}", v) end
push forms/ into _design/$this/forms
couchrest_couchrest
train
b20ecc7b54f63457c5d0b20b4ea225ffe9b4071e
diff --git a/internal/kms/kes.go b/internal/kms/kes.go index <HASH>..<HASH> 100644 --- a/internal/kms/kes.go +++ b/internal/kms/kes.go @@ -27,6 +27,10 @@ import ( "github.com/minio/kes" ) +const ( + tlsClientSessionCacheSize = 100 +) + // Config contains various KMS-related configuration // parameters - like KMS endpoints or authentication // credentials. @@ -59,9 +63,10 @@ func NewWithConfig(config Config) (KMS, error) { copy(endpoints, config.Endpoints) client := kes.NewClientWithConfig("", &tls.Config{ - MinVersion: tls.VersionTLS12, - Certificates: []tls.Certificate{config.Certificate}, - RootCAs: config.RootCAs, + MinVersion: tls.VersionTLS12, + Certificates: []tls.Certificate{config.Certificate}, + RootCAs: config.RootCAs, + ClientSessionCache: tls.NewLRUClientSessionCache(tlsClientSessionCacheSize), }) client.Endpoints = endpoints return &kesClient{
Add support of TLS session tickets with KES server (#<I>) Reduce overhead for communication between MinIO server and KES server.
minio_minio
train
ab83fb9ebbfb0d8d7afb09ddf0daac82c8c13c80
diff --git a/src/Illuminate/Database/Eloquent/Concerns/HasTimestamps.php b/src/Illuminate/Database/Eloquent/Concerns/HasTimestamps.php index <HASH>..<HASH> 100644 --- a/src/Illuminate/Database/Eloquent/Concerns/HasTimestamps.php +++ b/src/Illuminate/Database/Eloquent/Concerns/HasTimestamps.php @@ -42,7 +42,7 @@ trait HasTimestamps $this->setUpdatedAt($time); } - if (! $this->exists && ! $this->isDirty(static::CREATED_AT)) { + if (! $this->exists && ! is_null(static::CREATED_AT) && ! $this->isDirty(static::CREATED_AT)) { $this->setCreatedAt($time); } }
[<I>] Allow to disable `CREATED_AT`. (cherry picked from commit <I>b7fc)
laravel_framework
train
8be1a9bdf4b39cd6adc55added7f183c74429f49
diff --git a/run/lib/run.js b/run/lib/run.js index <HASH>..<HASH> 100644 --- a/run/lib/run.js +++ b/run/lib/run.js @@ -1,21 +1,9 @@ +var fs = require('fs') var cp = require('child_process') var path = require('path') var CommonEnv = require('../env') var formattedOutput = require('./formatted_output') -var scripts = [ - { - ext: 'sh', - cmd: 'sh', - error: /No such file or directory/i - }, - { - ext: 'js', - cmd: 'node', - error: /Cannot find module/i - } -] - module.exports = function run(taskname, SpecialEnv) { formattedOutput.start({ taskname }) @@ -26,32 +14,56 @@ module.exports = function run(taskname, SpecialEnv) { .catch(formattedOutput.fail) } -function execute(taskname, env, scriptIndex = 0) { +var scripts = [ + { ext: 'sh', cmd: 'sh' }, + { ext: 'js', cmd: 'node' } +] + +function findScript(_path, scriptIndex = 0) { return new Promise((resolve, reject) => { var script = scripts[scriptIndex] - var file = path.join(__dirname, '..', 'tasks', `${taskname}.${script.ext}`) - var command = script.cmd + ' '+ file - var ps = cp.exec(command, { env }) - var stderr = '' - - ps.stdout.pipe(process.stdout) - ps.stderr.pipe(process.stderr) - ps.stderr.on('data', data => { stderr += data }) - - ps.on('close', code => { - if (code == 0) { - resolve({ taskname }) - - } else if (script.error.test(stderr) && scripts.length > scriptIndex + 1) { - resolve(execute(taskname, env, scriptIndex + 1)) - - } else { - reject({ - taskname, - code, - stderr - }) + var name = `${_path}.${script.ext}` + fs.stat(name, err => { + if (err) { + if (scripts.length > scriptIndex + 1) { + return resolve(findScript(_path, scriptIndex + 1)) + } + return reject({ err, notFound: true }) } + resolve(script) }) }) } + +function execute(taskname, env) { + return new Promise((resolve, reject) => { + var file = path.join(__dirname, '..', 'tasks', `${taskname}`) + + findScript(file).then(script => { + var command = script.cmd + ' ' + `${file}.${script.ext}` + var ps = cp.exec(command, { env }) + var stderr = '' + + ps.stdout.pipe(process.stdout) + ps.stderr.pipe(process.stderr) + ps.stderr.on('data', data => { stderr += data }) + + ps.on('close', code => { + if (code == 0) { + resolve({ taskname }) + + } else { + reject({ + taskname, + code, + stderr + }) + } + }) + }).catch(e => reject({ + taskname, + code: 1, + stderr: e.notFound ? `Task ${taskname} couldn't found.` : e + })) + }) +}
Handle finding the script for the task silently
scriptype_salinger
train
10ee5c8999626ce91746c3720f6051475e3e58d0
diff --git a/lib/jekyll/site.rb b/lib/jekyll/site.rb index <HASH>..<HASH> 100644 --- a/lib/jekyll/site.rb +++ b/lib/jekyll/site.rb @@ -88,17 +88,8 @@ module Jekyll end end - self.converters = Jekyll::Converter.subclasses.select do |c| - !self.safe || c.safe - end.map do |c| - c.new(self.config) - end - - self.generators = Jekyll::Generator.subclasses.select do |c| - !self.safe || c.safe - end.map do |c| - c.new(self.config) - end + self.converters = instantiate_subclasses(Jekyll::Converter) + self.generators = instantiate_subclasses(Jekyll::Generator) end # Internal: Setup the plugin search path @@ -388,6 +379,21 @@ module Jekyll end end + # Create array of instances of the subclasses of the class or module + # passed in as argument. + # + # klass - class or module containing the subclasses which should be + # instantiated + # + # Returns array of instances of subclasses of parameter + def instantiate_subclasses(klass) + klass.subclasses.select do |c| + !self.safe || c.safe + end.map do |c| + c.new(self.config) + end + end + # Read the entries from a particular directory for processing # # dir - The String relative path of the directory to read
Remove the duplication when creating Converters and Generators Encapsulate it in a method and give the method the class to walk the subclass tree for to create new objects.
jekyll_jekyll
train
fb0750710ed16a71405a5d519d2047164356d994
diff --git a/caravel/assets/visualizations/nvd3_vis.js b/caravel/assets/visualizations/nvd3_vis.js index <HASH>..<HASH> 100644 --- a/caravel/assets/visualizations/nvd3_vis.js +++ b/caravel/assets/visualizations/nvd3_vis.js @@ -75,7 +75,7 @@ function nvd3Vis(slice) { chart.width(width); chart.xAxis .showMaxMin(false) - .staggerLabels(true) + .staggerLabels(true); chart.stacked(fd.bar_stacked); break;
[hotfix] missing semicolon breaking build
apache_incubator-superset
train
d6173fd2a4f54fd52791037b52b32319a1ba5c4e
diff --git a/packages/react-scripts/config/webpack.config.prod.js b/packages/react-scripts/config/webpack.config.prod.js index <HASH>..<HASH> 100644 --- a/packages/react-scripts/config/webpack.config.prod.js +++ b/packages/react-scripts/config/webpack.config.prod.js @@ -197,6 +197,8 @@ module.exports = { loader: 'css-loader', options: { importLoaders: 1, + minimize: true, + sourceMap: true, }, }, {
Minify and map css (#<I>) Minify and map css
vcarl_create-react-app
train
fee984ce0553f757c52a3af2ca6e21f3cf08f010
diff --git a/simplekv/crypt.py b/simplekv/crypt.py index <HASH>..<HASH> 100644 --- a/simplekv/crypt.py +++ b/simplekv/crypt.py @@ -21,8 +21,8 @@ class _HMACFileReader(object): '(too small)') def read(self, n=None): - if '' == self.buffer or 0 == n: - return '' + if b'' == self.buffer or 0 == n: + return b'' new_read = self.source.read(n) if n is not None else self.source.read() finished = (n is None or len(new_read) != n) @@ -94,11 +94,6 @@ class HMACDecorator(StoreDecorator): self.__hashfunc = hashfunc self.__secret_key = bytes(secret_key) - @property - def hmac_digestsize(self): - # returns, in bytes, the size of the digest - return self.hmac_mixin_hashfunc().digestsize - def __new_hmac(self, key, msg=None): if not msg: msg = b'' @@ -130,7 +125,7 @@ class HMACDecorator(StoreDecorator): if isinstance(file, str): try: f = open(file, 'wb') - except OSError as e: + except (OSError, IOError) as e: raise IOError('Error opening %s for writing: %r' % ( file, e )) diff --git a/tests/test_hmac.py b/tests/test_hmac.py index <HASH>..<HASH> 100644 --- a/tests/test_hmac.py +++ b/tests/test_hmac.py @@ -39,10 +39,27 @@ class TestHMACFileReader(object): def chunk_sizes(self, value): return [10 ** n for n in xrange(2, 8)] + def test_close(self, create_reader): + reader = create_reader() + assert not reader.source.closed + reader.close() + assert reader.source.closed + + def test_close_via_context(self, create_reader): + reader = create_reader() + assert not reader.source.closed + with reader as r: + assert r is reader + assert reader.source.closed + def test_reading_limit_0(self, create_reader): reader = create_reader() - assert reader.read(0) == '' - assert reader.read(0) == '' + data = reader.read(0) + assert isinstance(data, bytes) + assert len(data) == 0 + data = reader.read(0) + assert isinstance(data, bytes) + assert len(data) == 0 def test_reading_with_limit(self, secret_key, hashfunc, value, create_reader, chunk_sizes): @@ -100,6 +117,7 @@ class TestHMACFileReader(object): # this only works with dicts, as we access the internal structures to # manipulate values class HMACDec(object): + @pytest.fixture def hmacstore(self, secret_key, store): return HMACDecorator(secret_key, store) @@ -111,6 +129,32 @@ class HMACDec(object): with pytest.raises(VerificationException): hmacstore.get(key) + def test_copy_raises_not_implemented(self, store): + with pytest.raises(NotImplementedError): + HMACDecorator(b'secret', store).copy(u'src', u'dest') + + def test_put_file_obj(self, key, value, hmacstore): + hmacstore.put_file(key, BytesIO(value)) + assert hmacstore.get(key) == value + + def test_put_file_str(self, key, value, hmacstore): + with tempfile.NamedTemporaryFile(mode='wb', delete=False) as f: + f.write(value) + hmacstore.put_file(key, f.name) + assert hmacstore.get(key) == value + + def test_get_file_obj(self, key, value, hmacstore): + hmacstore.put(key, value) + b = BytesIO() + hmacstore.get_file(key, b) + assert b.getvalue() == value + + def test_get_file_non_writable_target(self, key, value, hmacstore): + hmacstore.put(key, value) + path = '/tmp/this/file/does/not/exist' + with pytest.raises(IOError, match='Error opening {} for writing'.format(path)): + hmacstore.get_file(key, path) + def test_get_file_fails_on_manipulation(self, hmacstore, key, value): hmacstore.put(key, value) hmacstore.d[key] += b('a')
improve tests for hmac store decorator
mbr_simplekv
train
27ce6948fc2aa1054ad1bef25230cc3b648d92af
diff --git a/transaction.go b/transaction.go index <HASH>..<HASH> 100644 --- a/transaction.go +++ b/transaction.go @@ -145,25 +145,6 @@ func DeleteTransaction(name string) error { return nil } -func TransactionName(name string) (*Transaction, error) { - params := transactionRequest{Name: name} - req := NewJSON2Request("transaction-hash", APICounter(), params) - - resp, err := walletRequest(req) - if err != nil { - return nil, err - } - if resp.Error != nil { - return nil, resp.Error - } - tx := new(Transaction) - if err := json.Unmarshal(resp.JSONResult(), tx); err != nil { - return nil, err - } - - return tx, nil -} - func ListTransactionsAll() ([]*Transaction, error) { type multiTransactionResponse struct { Transactions []*Transaction `json:"transactions"` @@ -487,7 +468,7 @@ func SendTransaction(name string) (*Transaction, error) { if fresp.Error != nil { return nil, fresp.Error } - tx, err := TransactionName(name) + tx, err := GetTmpTransaction(name) if err != nil { return nil, err } @@ -633,3 +614,23 @@ func GetTransaction(txID string) (*TransactionResponse, error) { return txResp, nil } + +// GetTmpTransaction gets a temporary transaction from the wallet +func GetTmpTransaction(name string) (*Transaction, error) { + params := transactionRequest{Name: name} + req := NewJSON2Request("transaction-hash", APICounter(), params) + + resp, err := walletRequest(req) + if err != nil { + return nil, err + } + if resp.Error != nil { + return nil, resp.Error + } + tx := new(Transaction) + if err := json.Unmarshal(resp.JSONResult(), tx); err != nil { + return nil, err + } + + return tx, nil +}
get tmp transactions from the wallet
FactomProject_factom
train
8ba60d07bfc99a5db767819fa8fb81c924f0ef19
diff --git a/src/class/ResClient.js b/src/class/ResClient.js index <HASH>..<HASH> 100644 --- a/src/class/ResClient.js +++ b/src/class/ResClient.js @@ -197,7 +197,10 @@ class ResClient { this.tryConnect = false; if (this.ws) { - this.ws.close(); + let ws = this.ws; + ws.onclose = null; + this._handleOnclose("disconnect() called"); + ws.close(); this._connectReject({ code: 'system.disconnect', message: "Disconnect called" }); } } @@ -720,8 +723,6 @@ class ResClient { * @private */ _handleOnopen(e) { - this.connected = true; - this._sendNow('version', { protocol: this.supportedProtocol }) .then(ver => { this.protocol = versionToInt(ver.protocol) || legacyProtocol; @@ -735,8 +736,16 @@ class ResClient { } throw err; }) - .then(() => this.onConnect ? this.onConnect(this) : null) .then(() => { + if (this.onConnect) { + this.connected = true; + let promise = this.onConnect(this); + this.connected = false; + return promise; + } + }) + .then(() => { + this.connected = true; this._subscribeToAllStale(); this._emit('connect', e); this._connectResolve(); diff --git a/src/class/ResModel.js b/src/class/ResModel.js index <HASH>..<HASH> 100644 --- a/src/class/ResModel.js +++ b/src/class/ResModel.js @@ -184,7 +184,7 @@ class ResModel { toJSON() { let o = this._definition ? obj.copy(this._props, this._definition) - : this._props; + : Object.assign({}, this._props); for (let k in o) { var v = o[k]; if (typeof v === 'object' && v !== null && v.toJSON) {
Fixed minor bug in ResModel. Fixed ResClient disconnect issue with onConnect callback.
jirenius_resclient
train
90286d65b8ad67ceb82f50a13d3af6a39dc98f4c
diff --git a/src/Bandcamp.php b/src/Bandcamp.php index <HASH>..<HASH> 100644 --- a/src/Bandcamp.php +++ b/src/Bandcamp.php @@ -30,6 +30,7 @@ class Bandcamp 'botanicalhouse.net', 'fikarecordings.com', 'mamabirdrecordingco.com', + 'maybemars.org', 'souterraine.biz', 'sunnysidezone.com', ]; diff --git a/tests/BandcampTest.php b/tests/BandcampTest.php index <HASH>..<HASH> 100644 --- a/tests/BandcampTest.php +++ b/tests/BandcampTest.php @@ -49,6 +49,7 @@ class BandcampTest extends \PHPUnit_Framework_TestCase { return [ ['https://bandcamp.com/track/title', 'Bandcamp'], + ['http://downloads.maybemars.org/', 'Bandcamp'], ['http://music.botanicalhouse.net/', 'Bandcamp'], ['http://tunes.mamabirdrecordingco.com/', 'Bandcamp'], ['http://souterraine.biz/', 'Bandcamp'],
Add maybemars.org in Bandcamp
jamband_ripple
train
b608e3c08270755def0837749f582f4d8232e393
diff --git a/lib/mongify/translation/process.rb b/lib/mongify/translation/process.rb index <HASH>..<HASH> 100644 --- a/lib/mongify/translation/process.rb +++ b/lib/mongify/translation/process.rb @@ -12,8 +12,8 @@ module Mongify prepare_connections(sql_connection, no_sql_connection) setup_db_index copy_data - copy_embedded_tables update_reference_ids + copy_embedded_tables copy_polymorphic_tables remove_pre_mongified_ids nil
Changed order of updating references with embedded_tables
anlek_mongify
train
667ee8e15529b252cc2d6bcfc0acfd4a5b248c49
diff --git a/bcbio/variation/vardict.py b/bcbio/variation/vardict.py index <HASH>..<HASH> 100644 --- a/bcbio/variation/vardict.py +++ b/bcbio/variation/vardict.py @@ -126,8 +126,9 @@ def _run_vardict_caller(align_bams, items, ref_file, assoc_files, fix_ambig_alt = vcfutils.fix_ambiguous_cl(5) remove_dup = vcfutils.remove_dup_cl() jvm_opts = _get_jvm_opts(items[0], tx_out_file) - r_setup = "unset R_HOME && export PATH=%s:$PATH && " % os.path.dirname(utils.Rscript_cmd()) - cmd = ("{r_setup}{jvm_opts}{vardict} -G {ref_file} -f {freq} " + setup = ("unset R_HOME && unset JAVA_HOME && export PATH=%s:$PATH && " % + os.path.dirname(utils.Rscript_cmd())) + cmd = ("{setup}{jvm_opts}{vardict} -G {ref_file} -f {freq} " "-N {sample} -b {bamfile} {opts} " "| {strandbias}" "| {var2vcf} -N {sample} -E -f {freq} {var2vcf_opts} " @@ -276,8 +277,9 @@ def _run_vardict_paired(align_bams, items, ref_file, assoc_files, (os.path.join(os.path.dirname(sys.executable), "py"), 0, dd.get_aligner(paired.tumor_data))) jvm_opts = _get_jvm_opts(items[0], tx_out_file) - r_setup = "unset R_HOME && export PATH=%s:$PATH && " % os.path.dirname(utils.Rscript_cmd()) - cmd = ("{r_setup}{jvm_opts}{vardict} -G {ref_file} -f {freq} " + setup = ("unset R_HOME && unset JAVA_HOME && export PATH=%s:$PATH && " % + os.path.dirname(utils.Rscript_cmd())) + cmd = ("{setup}{jvm_opts}{vardict} -G {ref_file} -f {freq} " "-N {paired.tumor_name} -b \"{paired.tumor_bam}|{paired.normal_bam}\" {opts} " "| {strandbias} " "| {var2vcf} -P 0.9 -m 4.25 -f {freq} {var2vcf_opts} "
VarDict: ensure anaconda installed java used Latest vardict-java release requires Java <I> which we ship with bcbio. However, JAVA_HOME could lead to using a different system java. We unset JAVA_HOME to avoid this issue.
bcbio_bcbio-nextgen
train
f5dad80072779459b8a18a5284cd3401dc0501cd
diff --git a/openfisca_core/scripts/__init__.py b/openfisca_core/scripts/__init__.py index <HASH>..<HASH> 100644 --- a/openfisca_core/scripts/__init__.py +++ b/openfisca_core/scripts/__init__.py @@ -4,18 +4,12 @@ import traceback import importlib import logging import pkgutil -import sys from os import linesep log = logging.getLogger(__name__) logging.basicConfig(format='%(levelname)s: %(message)s') -def handle_error(error_message): - log.error(error_message) - sys.exit(1) - - def add_tax_benefit_system_arguments(parser): parser.add_argument('-c', '--country-package', action = 'store', help = u'country package to use. If not provided, an automatic detection will be attempted by scanning the python packages installed in your environment which name contains the word "openfisca".') parser.add_argument('-e', '--extensions', action = 'store', help = u'extensions to load', nargs = '*') @@ -35,9 +29,9 @@ def build_tax_benefit_system(country_package_name, extensions, reforms): u'Are you sure it is installed in your environment? If so, look at the stack trace above to determine the origin of this error.', u'See more at <https://github.com/openfisca/country-template#installing>.']) - handle_error(message) + raise ImportError(message) if not hasattr(country_package, 'CountryTaxBenefitSystem'): - handle_error(u'`{}` does not seem to be a valid Openfisca country package.'.format(country_package_name)) + raise ImportError(u'`{}` does not seem to be a valid Openfisca country package.'.format(country_package_name)) country_package = importlib.import_module(country_package_name) tax_benefit_system = country_package.CountryTaxBenefitSystem() @@ -68,12 +62,12 @@ def detect_country_package(): message = linesep.join([traceback.format_exc(), u'Could not import module `{}`.'.format(module_name), u'Look at the stack trace above to determine the error that stopped installed modules detection.']) - handle_error(message) + raise ImportError(message) if hasattr(module, 'CountryTaxBenefitSystem'): installed_country_packages.append(module_name) if len(installed_country_packages) == 0: - handle_error(u'No country package has been detected on your environment. If your country package is installed but not detected, please use the --country-package option.') + raise ImportError(u'No country package has been detected on your environment. If your country package is installed but not detected, please use the --country-package option.') if len(installed_country_packages) > 1: log.warning(u'Several country packages detected : `{}`. Using `{}` by default. To use another package, please use the --country-package option.'.format(', '.join(installed_country_packages), installed_country_packages[0])) return installed_country_packages[0]
Improve error handling in tax-benefit system (TBS) creation
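The commit's pattern in minimal form, assuming only that a package name is passed in: raise ImportError with the full context instead of exiting the process, so callers (tests, other libraries, the CLI) decide the exit policy.

```python
import importlib
import traceback
from os import linesep

def import_country_package(name):
    # Raise instead of calling sys.exit(1): library code reports the
    # failure, the CLI layer can still turn it into exit code 1.
    try:
        return importlib.import_module(name)
    except ImportError:
        message = linesep.join([
            traceback.format_exc(),
            'Could not import module `{}`.'.format(name),
        ])
        raise ImportError(message)
```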
openfisca_openfisca-core
train
15b2ad9f43b6f1c1cba9629ceb5f3dc9496f2fd9
diff --git a/out_response.js b/out_response.js index <HASH>..<HASH> 100644 --- a/out_response.js +++ b/out_response.js @@ -170,6 +170,10 @@ TChannelOutResponse.prototype.sendCallResponseFrame = function sendCallResponseF bufArg3: arg3.slice(0, 50), arg3: String(arg3).slice(0, 50) })); + break; + default: + // TODO: log warn + break; } }; @@ -193,6 +197,10 @@ TChannelOutResponse.prototype.sendCallResponseContFrame = function sendCallRespo state: self.state, method: 'sendCallResponseContFrame' })); + break; + default: + // TODO: log warn + break; } };
linting: [out_response] comply with default-case rule
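For context, the default-case lint rule demands a catch-all arm on every switch, even if it only holds a TODO. A rough Python 3.10 analogue using match (the frame names are illustrative):

```python
def frame_for_state(state):
    # Every dispatch ends in a catch-all arm instead of silently
    # falling through -- the match-statement equivalent of `default:`.
    match state:
        case "initial":
            return "call-response frame"
        case "streaming":
            return "continuation frame"
        case _:
            # TODO: log warn, mirroring the commit's placeholder default
            return None
```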
uber_tchannel-node
train
bf92acc2530f46711ffaa3925882ace38f617d8a
diff --git a/question/engine/tests/unitofwork_test.php b/question/engine/tests/unitofwork_test.php index <HASH>..<HASH> 100644 --- a/question/engine/tests/unitofwork_test.php +++ b/question/engine/tests/unitofwork_test.php @@ -126,7 +126,7 @@ class question_engine_unit_of_work_test extends data_loading_method_test_base { array(1, 1, 'unit_test', 'interactive', 1, 123, 1, 1, 'interactive', -1, 1, 1.0000000, 0.0000000, 0, '', '', '', 1256233790, 2, 1, 'todo', null, 1256233720, 1, '-_triesleft', 1), array(1, 1, 'unit_test', 'interactive', 1, 123, 1, 1, 'interactive', -1, 1, 1.0000000, 0.0000000, 0, '', '', '', 1256233790, 3, 2, 'todo', null, 1256233740, 1, '-tryagain', 1), array(1, 1, 'unit_test', 'interactive', 1, 123, 1, 1, 'interactive', -1, 1, 1.0000000, 0.0000000, 0, '', '', '', 1256233790, 5, 3, 'gradedright', null, 1256233790, 1, 'answer', 'frog'), - array(1, 1, 'unit_test', 'interactive', 1, 123, 1, 1, 'interactive', -1, 1, 1.0000000, 0.0000000, 0, '', '', '', 1256233790, 5, 3, 'gradedright', 1.0000000, 1256233790, 1, '-finish', 1), + array(1, 1, 'unit_test', 'interactive', 1, 123, 1, 1, 'interactive', -1, 1, 1.0000000, 0.0000000, 0, '', '', '', 1256233790, 5, 3, 'gradedright', 1.0000000, 1256233790, 1, '-submit', 1), ); } @@ -196,13 +196,7 @@ class question_engine_unit_of_work_test extends data_loading_method_test_base { public function test_regrade_same_steps() { // Change the question in a minor way and regrade. - if (!isset($this->quba->get_question($this->slot)->answer)) { - $this->quba->get_question($this->slot)->answer = array(); - } - if (!isset($this->quba->get_question($this->slot)->answer[14])) { - $this->quba->get_question($this->slot)->answer[14] = new stdClass(); - } - $this->quba->get_question($this->slot)->answer[14]->fraction = 0.5; + $this->quba->get_question($this->slot)->answers[14]->fraction = 0.5; $this->quba->regrade_all_questions(); // Here, the qa, and all the steps, should be marked as updated. @@ -258,7 +252,7 @@ class question_engine_unit_of_work_test extends data_loading_method_test_base { $this->assertEquals(array('-tryagain' => 1), $firstdeletedstep->get_all_data()); $seconddeletedstep = end($deletedsteps); - $this->assertEquals(array('answer' => 'frog', '-finish' => 1), + $this->assertEquals(array('answer' => 'frog', '-submit' => 1), $seconddeletedstep->get_all_data()); }
MDL-<I> question engine: fixup tests that this change broke. The test data was wrong, and was triggering the work-around code that MDL-<I> introduced. I fixed the test data. Also, I fixed one of the tests that had been broken.
moodle_moodle
train
f0dccab88425c515b03e5bc3c2b35c5595ee3e07
diff --git a/tests/test_io/test_annotation_format.py b/tests/test_io/test_annotation_format.py index <HASH>..<HASH> 100644 --- a/tests/test_io/test_annotation_format.py +++ b/tests/test_io/test_annotation_format.py @@ -1,13 +1,22 @@ -from os.path import join +"""Test model annotations in JSON format.""" + +from pathlib import Path import pytest from cobra.io import load_json_model, write_sbml_model -def test_load_json_model_valid(data_directory, tmp_path): - """Test loading a valid annotation from JSON.""" - path_to_file = join(data_directory, "valid_annotation_format.json") +def test_load_json_model_valid(data_directory: Path, tmp_path: Path) -> None: + """Test loading a valid annotation from JSON. + + data_directory : pathlib.Path + The path to the test data directory. + tmp_path_order : pathlib.Path + The folder path for storing I/O order test files. + + """ + path_to_file = data_directory / "valid_annotation_format.json" model = load_json_model(path_to_file) expected = { "bigg.reaction": [["is", "PFK26"]], @@ -16,12 +25,17 @@ def test_load_json_model_valid(data_directory, tmp_path): } for metabolite in model.metabolites: assert metabolite.annotation == expected - path_to_output = join(str(tmp_path), "valid_annotation_output.xml") - write_sbml_model(model, path_to_output) + path_to_output = tmp_path / "valid_annotation_output.xml" + write_sbml_model(model, str(path_to_output.resolve())) + + +def test_load_json_model_invalid(data_directory: Path) -> None: + """Test that loading an invalid annotation from JSON raises TypeError. + data_directory : pathlib.Path + The path to the test data directory. -def test_load_json_model_invalid(data_directory): - """Test that loading an invalid annotation from JSON raises TypeError.""" - path = join(data_directory, "invalid_annotation_format.json") + """ + path = data_directory / "invalid_annotation_format.json" with pytest.raises(TypeError): - model = load_json_model(path) + load_json_model(path)
refactor: resolve D<I>, F<I> and add type annotations and docstrings for test_annotation_format.py
opencobra_cobrapy
train
ece89d8b1b55c795e1de3e8842e22ca3eebd3f3d
diff --git a/core.py b/core.py index <HASH>..<HASH> 100644 --- a/core.py +++ b/core.py @@ -98,13 +98,15 @@ def setup (**attrs): dist.run_commands () except KeyboardInterrupt: raise SystemExit, "interrupted" - except IOError, exc: + except (OSError, IOError), exc: # arg, try to work with Python pre-1.5.2 if hasattr (exc, 'filename') and hasattr (exc, 'strerror'): raise SystemExit, \ "error: %s: %s" % (exc.filename, exc.strerror) else: raise SystemExit, str (exc) + except DistutilsExecError, msg: + raise SystemExit, "error: " + str (msg) # setup ()
Beefed up error-handling in 'setup()' a smidge: handle OSError and DistutilsExecError now.
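A modern-Python sketch of the same idea (on Python 3, OSError subsumes IOError; the commit additionally maps DistutilsExecError to SystemExit the same way):

```python
def run_setup(dist):
    # Map low-level failures to a clean SystemExit instead of a traceback.
    try:
        dist.run_commands()
    except KeyboardInterrupt:
        raise SystemExit("interrupted")
    except OSError as exc:
        if getattr(exc, "filename", None) and getattr(exc, "strerror", None):
            raise SystemExit("error: %s: %s" % (exc.filename, exc.strerror))
        raise SystemExit(str(exc))
```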
pypa_setuptools
train
015b3035877b621e4e0bb9ed01a32bb0a65bd4b6
diff --git a/source/rafcon/core/start.py b/source/rafcon/core/start.py index <HASH>..<HASH> 100755 --- a/source/rafcon/core/start.py +++ b/source/rafcon/core/start.py @@ -75,7 +75,7 @@ def setup_environment(): if not os.environ.get('RAFCON_LIB_PATH', None): # set env variable RAFCON_LIB_PATH to the library directory of RAFCON (when not using RMPM) - os.environ['RAFCON_LIB_PATH'] = join(dirname(rafcon_root_path), 'libraries') + os.environ['RAFCON_LIB_PATH'] = join(dirname(dirname(rafcon_root_path)), 'share', 'libraries') def parse_state_machine_path(path):
Set correct RAFCON_LIB_PATH. The path to the generic libraries was changed a while ago, but RAFCON_LIB_PATH was never adapted. This commit does that.
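A small sketch of the path arithmetic (the example root path is hypothetical): the libraries moved up one level into share/, hence the extra dirname() call.

```python
from os.path import dirname, join

def default_library_path(rafcon_root_path):
    # The libraries moved from <root>/../libraries to
    # <root>/../../share/libraries, hence the second dirname().
    return join(dirname(dirname(rafcon_root_path)), 'share', 'libraries')

# e.g. '/opt/project/source/rafcon' -> '/opt/project/share/libraries'
print(default_library_path('/opt/project/source/rafcon'))
```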
DLR-RM_RAFCON
train
0c4da882c2a51135fad7ba36895d1158cf400327
diff --git a/state/api/base/caller.go b/state/api/base/caller.go
index <HASH>..<HASH> 100644
--- a/state/api/base/caller.go
+++ b/state/api/base/caller.go
@@ -5,7 +5,7 @@ package base
 
 // APICaller is implemented by the client-facing State object.
 type APICaller interface {
-	// Call makes a call to the API server with the given object type,
+	// APICall makes a call to the API server with the given object type,
 	// id, request and parameters. The response is filled in with the
 	// call's result if the call is successful.
 	APICall(objType string, version int, id, request string, params, response interface{}) error
@@ -40,6 +40,8 @@ type facadeCaller struct {
 	caller APICaller
 }
 
+var _ FacadeCaller = facadeCaller{}
+
 // FacadeCall will place a request against the API using the requested
 // Facade and the best version that the API server supports that is
 // also known to the client. (id is always passed as the empty string.)
diff --git a/state/api/base/clientfacade.go b/state/api/base/clientfacade.go
index <HASH>..<HASH> 100644
--- a/state/api/base/clientfacade.go
+++ b/state/api/base/clientfacade.go
@@ -13,13 +13,16 @@ type APICallCloser interface {
 }
 
 // ClientFacade should be embedded by client-side facades that are intended as
-// "client" (aka user facing) facades. (In comparison to agent facing facades.)
+// "client" (aka user facing) facades versus agent facing facades.
 // They provide two common methods for writing the client side code.
 // BestAPIVersion() is used to allow for compatibility testing, and Close() is
 // used to indicate when we are done with the connection.
 type ClientFacade interface {
-	// BestAPIVersion returns the version of the API that will be
+	// BestAPIVersion returns the API version that we were able to
+	// determine is supported by both the client and the API Server
 	BestAPIVersion() int
+
+	// Close the connection to the API server.
 	Close() error
 }
 
@@ -32,6 +35,8 @@ type clientFacade struct {
 	closer
 }
 
+var _ ClientFacade = (*clientFacade)(nil)
+
 // NewClientFacade prepares a client-facing facade for work against the API.
 // It is expected that most client-facing facades will embed a ClientFacade and
 // will use a FacadeCaller so this function returns both.
diff --git a/state/api/common/watch.go b/state/api/common/watch.go
index <HASH>..<HASH> 100644
--- a/state/api/common/watch.go
+++ b/state/api/common/watch.go
@@ -12,7 +12,7 @@ import (
 )
 
 // Watch starts a NotifyWatcher for the entity with the specified tag.
-// TODO: Watch should tage a names.Tag instead of a tag string
+// TODO: Watch should tag a names.Tag instead of a tag string
 func Watch(facade base.FacadeCaller, tag string) (watcher.NotifyWatcher, error) {
 	var results params.NotifyWatchResults
 	args := params.Entities{
diff --git a/state/api/facadeversions.go b/state/api/facadeversions.go
index <HASH>..<HASH> 100644
--- a/state/api/facadeversions.go
+++ b/state/api/facadeversions.go
@@ -6,6 +6,8 @@ package api
 // facadeVersions lists the best version of facades that we know about. This
 // will be used to pick out a default version for communication, given the list
 // of known versions that the API server tells us it is capable of supporting.
+// This map should be updated whenever the API server exposes a new version (so
+// that the client will use it whenever it is available).
var facadeVersions = map[string]int{ "Agent": 0, "AllWatcher": 0, diff --git a/state/api/facadeversions_test.go b/state/api/facadeversions_test.go index <HASH>..<HASH> 100644 --- a/state/api/facadeversions_test.go +++ b/state/api/facadeversions_test.go @@ -1,15 +1,15 @@ -// Copyright 2013 Canonical Ltd. +// Copyright 2014 Canonical Ltd. // Licensed under the AGPLv3, see LICENCE file for details. package api_test import ( + "github.com/juju/utils/set" gc "launchpad.net/gocheck" "github.com/juju/juju/state/api" "github.com/juju/juju/state/apiserver/common" coretesting "github.com/juju/juju/testing" - "github.com/juju/utils/set" ) type facadeVersionSuite struct { @@ -34,8 +34,8 @@ func (*facadeVersionSuite) TestFacadeVersionsMatchServerVersions(c *gc.C) { serverFacadeBestVersions[facade.Name] = facade.Versions[len(facade.Versions)-1] } // First check that both sides know about all the same versions - c.Check(serverFacadeNames.Difference(clientFacadeNames).SortedValues(), gc.DeepEquals, []string{}) - c.Check(clientFacadeNames.Difference(serverFacadeNames).SortedValues(), gc.DeepEquals, []string{}) + c.Check(serverFacadeNames.Difference(clientFacadeNames).SortedValues(), gc.HasLen, 0) + c.Check(clientFacadeNames.Difference(serverFacadeNames).SortedValues(), gc.HasLen, 0) // Next check that the best versions match c.Check(*api.FacadeVersions, gc.DeepEquals, serverFacadeBestVersions) }
review feedback. Tweak a bunch of documentation, etc.
juju_juju
train
e77023c7723f38d321ac3a3f15f3b76719aa89bb
diff --git a/src/MvcCore/Config/IniRead.php b/src/MvcCore/Config/IniRead.php index <HASH>..<HASH> 100644 --- a/src/MvcCore/Config/IniRead.php +++ b/src/MvcCore/Config/IniRead.php @@ -47,6 +47,7 @@ trait IniRead { $objectType[1] = (object) $objectType[1]; $this->envData[$envName] = $data; } + return TRUE; } diff --git a/src/MvcCore/Config/ReadWrite.php b/src/MvcCore/Config/ReadWrite.php index <HASH>..<HASH> 100644 --- a/src/MvcCore/Config/ReadWrite.php +++ b/src/MvcCore/Config/ReadWrite.php @@ -47,7 +47,7 @@ trait ReadWrite { '%appPath%', $app->GetAppDir(), ltrim($appRootRelativePath, '/') )); if (!array_key_exists($configFullPath, self::$configsCache)) { - $config = $configClass::getConfigInstance($configFullPath, $configClass, TRUE); + $config = $configClass::LoadConfig($configFullPath, $configClass, TRUE); if ($config) { $environment = $app->GetEnvironment(); $doNotThrownError = func_num_args() > 0 ? func_get_arg(0) : FALSE; @@ -84,7 +84,7 @@ trait ReadWrite { if (!array_key_exists($configFullPath, self::$configsCache)) { $systemConfigClass = $app->GetConfigClass(); $isSystem = $systemConfigClass::GetSystemConfigPath() === '/' . $appRootRelativePath; - $config = $systemConfigClass::getConfigInstance($configFullPath, $systemConfigClass, $isSystem); + $config = $systemConfigClass::LoadConfig($configFullPath, $systemConfigClass, $isSystem); if ($config) { $environment = $app->GetEnvironment(); $doNotThrownError = func_num_args() > 1 ? func_get_arg(1) : FALSE; @@ -131,13 +131,13 @@ trait ReadWrite { } /** - * Try to load and parse config file by absolute path. + * @inheritDocs * @param string $configFullPath * @param string $systemConfigClass * @param bool $isSystemConfig * @return \MvcCore\Config|bool */ - protected static function getConfigInstance ($configFullPath, $systemConfigClass, $isSystemConfig = FALSE) { + public static function LoadConfig ($configFullPath, $systemConfigClass, $isSystemConfig = FALSE) { /** @var $config \MvcCore\Config */ $config = $systemConfigClass::CreateInstance([], $configFullPath); if (!file_exists($configFullPath)) { diff --git a/src/MvcCore/IConfig.php b/src/MvcCore/IConfig.php index <HASH>..<HASH> 100644 --- a/src/MvcCore/IConfig.php +++ b/src/MvcCore/IConfig.php @@ -102,6 +102,15 @@ interface IConfig { public static function GetConfig ($appRootRelativePath); /** + * Try to load and parse config file by absolute path. + * @param string $configFullPath + * @param string $systemConfigClass + * @param bool $isSystemConfig + * @return \MvcCore\Config|bool + */ + public static function LoadConfig ($configFullPath, $systemConfigClass, $isSystemConfig = FALSE); + + /** * Encode all data into string and store it in `\MvcCore\Config::$fullPath`. * @throws \Exception Configuration data was not possible to dump or write. * @return bool
protected method Config::getConfigInstance changed to public method Config::LoadConfig - because an extension needs it
mvccore_mvccore
train
8561192771a6cfc32f7f715b776e8d21acb039cf
diff --git a/python/test/function/test_assign.py b/python/test/function/test_assign.py index <HASH>..<HASH> 100644 --- a/python/test/function/test_assign.py +++ b/python/test/function/test_assign.py @@ -20,7 +20,7 @@ import pytest import numpy as np import nnabla as nn import nnabla.functions as F -from nbla_test_utils import list_context +from nbla_test_utils import list_context, recomputation_test from nnabla.testing import assert_allclose ctxs = list_context('Assign') @@ -63,3 +63,14 @@ def test_assign_forward_backward(seed, ctx, func_name): f.backward([dst, src], [assign], accum=[False]) assert np.all(dst.g == assign.g) assert np.all(src.g == np.zeros((2, 3, 4))) + + +@pytest.mark.parametrize("ctx, func_name", ctxs) +@pytest.mark.parametrize("seed", [314]) +def test_assign_recomputation(seed, ctx, func_name): + rng = np.random.RandomState(seed) + dst = nn.Variable((2, 3, 4)) + src = nn.Variable((2, 3, 4)) + + recomputation_test(rng=rng, func=F.assign, vinputs=[dst, src], + func_args=[], func_kwargs={}, ctx=ctx)
Add test for functions that do not use `function_tester`
sony_nnabla
train
88d7b19e295f27e1dcf970b979585fa9d3984cd6
diff --git a/lib/twitter/rest/client.rb b/lib/twitter/rest/client.rb
index <HASH>..<HASH> 100644
--- a/lib/twitter/rest/client.rb
+++ b/lib/twitter/rest/client.rb
@@ -59,13 +59,14 @@ module Twitter
 
     # Perform an HTTP GET request
     def get(path, params = {})
-      request(:get, path, params)
+      headers = request_headers(:get, path, params)
+      request(:get, path, params, headers)
     end
 
     # Perform an HTTP POST request
     def post(path, params = {})
-      signature_params = params.values.any? { |value| value.respond_to?(:to_io) } ? {} : params
-      request(:post, path, params, signature_params)
+      headers = params.values.any? { |value| value.respond_to?(:to_io) } ? request_headers(:post, path, params, {}) : request_headers(:post, path, params)
+      request(:post, path, params, headers)
     end
 
     # @return [Boolean]
@@ -87,11 +88,8 @@ module Twitter
       @connection ||= Faraday.new(ENDPOINT, connection_options)
     end
 
-    def request(method, path, params = {}, signature_params = params)
-      response = connection.send(method.to_sym, path, params) do |request|
-        request.headers.update(request_headers(method, path, params, signature_params))
-      end
-      response.env
+    def request(method, path, params = {}, headers = {})
+      connection.send(method.to_sym, path, params) { |request| request.headers.update(headers) }.env
     rescue Faraday::Error::TimeoutError, Timeout::Error => error
       raise(Twitter::Error::RequestTimeout.new(error))
     rescue Faraday::Error::ClientError, JSON::ParserError => error
@@ -106,12 +104,12 @@ module Twitter
         headers[:authorization] = bearer_token_credentials_auth_header
         headers[:content_type] = 'application/x-www-form-urlencoded; charset=UTF-8'
       else
-        headers[:authorization] = auth_token(method, path, params, signature_params)
+        headers[:authorization] = auth_header(method, path, params, signature_params)
       end
       headers
     end
 
-    def auth_token(method, path, params = {}, signature_params = params)
+    def auth_header(method, path, params = {}, signature_params = params)
       if !user_token?
         @bearer_token = token unless bearer_token?
         bearer_auth_header
diff --git a/spec/twitter/rest/oauth_spec.rb b/spec/twitter/rest/oauth_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/twitter/rest/oauth_spec.rb
+++ b/spec/twitter/rest/oauth_spec.rb
@@ -8,17 +8,13 @@ describe Twitter::REST::OAuth do
 
   describe '#token' do
     before do
-      # WebMock treats Basic Auth differently so we have to chack against the full URL with credentials.
+ # Faraday treats Basic Auth differently so we have to use the full URL with credentials @oauth2_token_url = 'https://CK:CS@api.twitter.com/oauth2/token' - stub_request(:post, @oauth2_token_url).with(:body => 'grant_type=client_credentials').to_return(:body => fixture('bearer_token.json'), :headers => {:content_type => 'application/json; charset=utf-8'}) + stub_request(:post, @oauth2_token_url).with(:body => {'grant_type' => 'client_credentials'}).to_return(:body => fixture('bearer_token.json'), :headers => {:content_type => 'application/json; charset=utf-8'}) end it 'requests the correct resource' do @client.token - expect(a_request(:post, @oauth2_token_url).with(:body => {:grant_type => 'client_credentials'})).to have_been_made - end - it 'requests with the correct headers' do - @client.token - expect(a_request(:post, @oauth2_token_url).with(:headers => {:content_type => 'application/x-www-form-urlencoded; charset=UTF-8', :accept => '*/*'})).to have_been_made + expect(a_request(:post, @oauth2_token_url).with(:body => {:grant_type => 'client_credentials'}, :headers => {:content_type => 'application/x-www-form-urlencoded; charset=UTF-8', :accept => '*/*'})).to have_been_made end it 'returns the bearer token' do bearer_token = @client.token
Refactor Twitter::REST::Client and OAuth specs
sferik_twitter
train
3f7deff1c527cba40af60e3e9e60d029653a2a2c
diff --git a/lib/setup.js b/lib/setup.js index <HASH>..<HASH> 100644 --- a/lib/setup.js +++ b/lib/setup.js @@ -14,6 +14,12 @@ var logger = require('./logger'); module.exports = Promise.method(function (opts) { + opts = opts || {}; + // because this value is used during initialization of the environment we need to set it + // earlier and ensure that the rest of the stack is using the correct values + opts.package = resolve(opts.package || process.cwd()); + // this will be reset, below to be the actual cwd for sanity + opts.cwd = opts.package; return opts.__setup__ ? opts : env(opts).then(function (opts) { return configure(opts); }); @@ -28,8 +34,11 @@ function configure (opts) { // @note this was designed to allow existing code in the packager stream system to // continue to work and is less than ideal + // there is a convoluted usecase here for this property as it was most likely set to the + // "package" value earlier to allow env to setup with the correct configuration file + // but the remaining use-case for it is differentiated as the literal cwd and package is used + // where appropriate so we reset the value here opts.cwd = process.cwd(); - opts.package = resolve(opts.package || opts.env.cwd); opts.name = defaults('name', opts) || path.basename(opts.package); opts.logLevel = string('logLevel', opts.logLevel, opts); opts.production = bool('production', opts.production, opts);
ensure that the correct environment is read and used when targeting packages that aren't the same as cwd
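A minimal sketch of the ordering fix described above (option names mirror the commit; the environment-loading step is elided):

```python
import os

def setup(opts=None):
    # Resolve the target package *before* environment setup so the right
    # config files are read, then reset cwd to the literal working dir.
    opts = dict(opts or {})
    opts['package'] = os.path.abspath(opts.get('package') or os.getcwd())
    opts['cwd'] = opts['package']        # used while the env initializes
    # ... environment/configuration loading would happen here ...
    opts['cwd'] = os.getcwd()            # restored for the remaining stages
    return opts
```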
enyojs_enyo-dev
train
90434ff4ea4477941444f1e83313beb414838535
diff --git a/cookiecutter/hooks.py b/cookiecutter/hooks.py
index <HASH>..<HASH> 100644
--- a/cookiecutter/hooks.py
+++ b/cookiecutter/hooks.py
@@ -54,11 +54,14 @@ def find_hook(hook_name, hooks_dir='hooks'):
         logger.debug('No hooks/dir in template_dir')
         return None
 
+    scripts = []
     for hook_file in os.listdir(hooks_dir):
         if valid_hook(hook_file, hook_name):
-            return os.path.abspath(os.path.join(hooks_dir, hook_file))
+            scripts.append(os.path.abspath(os.path.join(hooks_dir, hook_file)))
 
-    return None
+    if len(scripts) == 0:
+        return None
+    return scripts
 
 
 def run_script(script_path, cwd='.'):
@@ -119,9 +122,10 @@ def run_hook(hook_name, project_dir, context):
     :param project_dir: The directory to execute the script from.
     :param context: Cookiecutter project context.
     """
-    script = find_hook(hook_name)
-    if script is None:
+    scripts = find_hook(hook_name)
+    if not scripts:
         logger.debug('No %s hook found', hook_name)
         return
     logger.debug('Running hook %s', hook_name)
-    run_script_with_context(script, project_dir, context)
+    for script in scripts:
+        run_script_with_context(script, project_dir, context)
diff --git a/tests/test_hooks.py b/tests/test_hooks.py
index <HASH>..<HASH> 100644
--- a/tests/test_hooks.py
+++ b/tests/test_hooks.py
@@ -9,7 +9,7 @@ import pytest
 from cookiecutter import hooks, utils, exceptions
 
 
-def make_test_repo(name):
+def make_test_repo(name, multiple_hooks=False):
     """Create test repository for test setup methods."""
     hook_dir = os.path.join(name, 'hooks')
     template = os.path.join(name, 'input{{hooks}}')
@@ -47,6 +47,26 @@ def make_test_repo(name):
         # Set the execute bit
         os.chmod(filename, os.stat(filename).st_mode | stat.S_IXUSR)
 
+    # Adding an additional pre script
+    if multiple_hooks:
+        if sys.platform.startswith('win'):
+            pre = 'pre_gen_project.bat'
+            with open(os.path.join(hook_dir, pre), 'w') as f:
+                f.write("@echo off\n")
+                f.write("\n")
+                f.write("echo post generation hook\n")
+                f.write("echo. >shell_pre.txt\n")
+        else:
+            pre = 'pre_gen_project.sh'
+            filename = os.path.join(hook_dir, pre)
+            with open(filename, 'w') as f:
+                f.write("#!/bin/bash\n")
+                f.write("\n")
+                f.write("echo 'post generation hook';\n")
+                f.write("touch 'shell_pre.txt'\n")
+            # Set the execute bit
+            os.chmod(filename, os.stat(filename).st_mode | stat.S_IXUSR)
+
     return post
 
 
@@ -68,11 +88,11 @@ class TestFindHooks(object):
         with utils.work_in(self.repo_path):
             expected_pre = os.path.abspath('hooks/pre_gen_project.py')
             actual_hook_path = hooks.find_hook('pre_gen_project')
-            assert expected_pre == actual_hook_path
+            assert expected_pre == actual_hook_path[0]
 
             expected_post = os.path.abspath('hooks/{}'.format(self.post_hook))
             actual_hook_path = hooks.find_hook('post_gen_project')
-            assert expected_post == actual_hook_path
+            assert expected_post == actual_hook_path[0]
 
     def test_no_hooks(self):
         """`find_hooks` should return None if the hook could not be found."""
@@ -98,7 +118,7 @@ class TestExternalHooks(object):
 
     def setup_method(self, method):
         """External hooks related tests setup fixture."""
-        self.post_hook = make_test_repo(self.repo_path)
+        self.post_hook = make_test_repo(self.repo_path, multiple_hooks=True)
 
     def teardown_method(self, method):
         """External hooks related tests teardown fixture."""
@@ -108,6 +128,8 @@ class TestExternalHooks(object):
             os.remove('python_pre.txt')
         if os.path.exists('shell_post.txt'):
             os.remove('shell_post.txt')
+        if os.path.exists('shell_pre.txt'):
+            os.remove('shell_pre.txt')
         if os.path.exists('tests/shell_post.txt'):
             os.remove('tests/shell_post.txt')
         if os.path.exists('tests/test-hooks/input{{hooks}}/python_pre.txt'):
@@ -163,6 +185,7 @@ class TestExternalHooks(object):
 
         with utils.work_in(self.repo_path):
             hooks.run_hook('pre_gen_project', tests_dir, {})
             assert os.path.isfile(os.path.join(tests_dir, 'python_pre.txt'))
+            assert os.path.isfile(os.path.join(tests_dir, 'shell_pre.txt'))
 
             hooks.run_hook('post_gen_project', tests_dir, {})
             assert os.path.isfile(os.path.join(tests_dir, 'shell_post.txt'))
Changed: Generated projects can use multiple hook types at the same time (sh + py) (#<I>) * Added support for multiple hooks in a single project * Added tests for Pull #<I> * Fix linting
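A simplified sketch of the multi-hook lookup (valid_hook() is approximated with a prefix check, and running a script is stubbed out):

```python
import os

def find_hooks(hook_name, hooks_dir='hooks'):
    # Collect *every* matching script (e.g. pre_gen_project.sh and
    # pre_gen_project.py) instead of returning the first hit.
    scripts = [
        os.path.abspath(os.path.join(hooks_dir, f))
        for f in os.listdir(hooks_dir)
        if f.startswith(hook_name)   # simplified stand-in for valid_hook()
    ]
    return scripts or None

def run_hooks(hook_name, hooks_dir='hooks'):
    for script in find_hooks(hook_name, hooks_dir) or []:
        print('would run', script)   # stand-in for run_script_with_context()
```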
audreyr_cookiecutter
train
5df7fab5b886e239813caa31b124a316fe60a2cb
diff --git a/annis-visualizers/src/main/java/annis/visualizers/component/tree/AnnisGraphTools.java b/annis-visualizers/src/main/java/annis/visualizers/component/tree/AnnisGraphTools.java index <HASH>..<HASH> 100644 --- a/annis-visualizers/src/main/java/annis/visualizers/component/tree/AnnisGraphTools.java +++ b/annis-visualizers/src/main/java/annis/visualizers/component/tree/AnnisGraphTools.java @@ -26,6 +26,9 @@ import edu.uci.ics.jung.graph.DirectedGraph; import edu.uci.ics.jung.graph.DirectedSparseGraph; import java.io.Serializable; import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.LinkedList; import java.util.List; import java.util.Set; @@ -51,16 +54,49 @@ public class AnnisGraphTools implements Serializable String terminalNamespace = input.getMappings().getProperty( TigerTreeVisualizer.TERMINAL_NS_KEY); - List<DirectedGraph<AnnisNode, Edge>> resultGraphs = - new ArrayList<DirectedGraph<AnnisNode, Edge>>(); + List<DirectedGraph<AnnisNode, Edge>> resultGraphs = new ArrayList<>(); + List<AnnisNode> rootNodes = new LinkedList<>(); + for (AnnisNode n : ag.getNodes()) { if (isRootNode(n, namespace)) { - resultGraphs.add(extractGraph(ag, n, terminalNamespace, terminalName)); + rootNodes.add(n); } } + + //sort root nodes according to their left-most covered token + HorizontalOrientation orientation = detectLayoutDirection(ag); + if (orientation == HorizontalOrientation.LEFT_TO_RIGHT) + { + Collections.sort(rootNodes, new Comparator<AnnisNode>() + { + @Override + public int compare(AnnisNode o1, AnnisNode o2) + { + return Long.compare(o1.getLeftToken(), o2.getLeftToken()); + } + } + ); + } + else if (orientation == HorizontalOrientation.RIGHT_TO_LEFT) + { + Collections.sort(rootNodes, new Comparator<AnnisNode>() + { + @Override + public int compare(AnnisNode o1, AnnisNode o2) + { + return Long.compare(o2.getLeftToken(), o1.getLeftToken()); + } + } + ); + } + for(AnnisNode r : rootNodes) + { + resultGraphs.add(extractGraph(ag, r, terminalNamespace, terminalName)); + } + return resultGraphs; }
sort the root nodes by their left_token value before adding their subgraphs to the list. This fixes #<I> and makes sure the graphs are output in the correct order.
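The sorting idea in a minimal Python sketch (node objects with a left_token attribute are assumed):

```python
def sort_root_nodes(root_nodes, orientation):
    # Order subtree roots by their left-most covered token; reverse the
    # comparison for right-to-left corpora so the trees come out in
    # reading order.
    reverse = (orientation == 'RIGHT_TO_LEFT')
    return sorted(root_nodes, key=lambda n: n.left_token, reverse=reverse)
```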
korpling_ANNIS
train
dd6950288fbf5edd37e44561f10249a9345934c8
diff --git a/hystrix-core/src/test/java/com/netflix/hystrix/HystrixCommandTest.java b/hystrix-core/src/test/java/com/netflix/hystrix/HystrixCommandTest.java index <HASH>..<HASH> 100644 --- a/hystrix-core/src/test/java/com/netflix/hystrix/HystrixCommandTest.java +++ b/hystrix-core/src/test/java/com/netflix/hystrix/HystrixCommandTest.java @@ -3124,7 +3124,7 @@ public class HystrixCommandTest extends CommonHystrixCommandTests<TestHystrixCom if (acquired) { try { numAcquired.incrementAndGet(); - Thread.sleep(10); + Thread.sleep(100); } catch (InterruptedException ex) { ex.printStackTrace(); } finally {
Deflaked test of semaphore concurrency
Netflix_Hystrix
train
63163eccc2b9bf3962ea93ca210bda6e291a279c
diff --git a/Form/ChoiceList/PhpcrOdmQueryBuilderLoader.php b/Form/ChoiceList/PhpcrOdmQueryBuilderLoader.php index <HASH>..<HASH> 100644 --- a/Form/ChoiceList/PhpcrOdmQueryBuilderLoader.php +++ b/Form/ChoiceList/PhpcrOdmQueryBuilderLoader.php @@ -80,6 +80,14 @@ class PhpcrOdmQueryBuilderLoader implements EntityLoaderInterface */ public function getEntitiesByIds($identifier, array $values) { + $values = array_values(array_filter($values, function ($v) { + return !empty($v); + })); + + if (0 == count($values)) { + return array(); + } + /* performance: if we could figure out whether the query builder is " * empty" (that is only checking for the class) we could optimize this * to a $this->dm->findMany(null, $values)
skip query when there is no valid id
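The guard in minimal form (run_query stands in for the PHPCR-ODM query execution): filter out empty ids first, and return early when nothing valid remains.

```python
def get_entities_by_ids(values, run_query):
    # Drop empty ids up front and skip the query entirely when nothing
    # valid remains -- an empty id list would be wasted (or invalid) work.
    values = [v for v in values if v]
    if not values:
        return []
    return run_query(values)
```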
doctrine_DoctrinePHPCRBundle
train
6c26a87901d12188dfd9986d9211f6077a286f9d
diff --git a/pkg/apparmor/setup.go b/pkg/apparmor/setup.go index <HASH>..<HASH> 100644 --- a/pkg/apparmor/setup.go +++ b/pkg/apparmor/setup.go @@ -99,11 +99,15 @@ func InstallDefaultProfile(backupPath string) error { return err } - // the current functionality of the load script is the exit 0 if the parser does not exist. - // we think we should fail loudly if you have apparmor enabled but not the parser to load - // the profile for use. output, err := exec.Command("/sbin/apparmor_parser", "-r", "-W", "docker").CombinedOutput() - if err != nil { + if err != nil && !os.IsNotExist(err) { + if e, ok := err.(*exec.Error); ok { + // keeping with the current profile load code, if the parser does not exist then + // just return + if e.Err == exec.ErrNotFound || os.IsNotExist(e.Err) { + return nil + } + } return fmt.Errorf("Error loading docker profile: %s (%s)", err, output) } return nil
Ignore os.IsNotExist error Docker-DCO-<I>-
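A Python sketch of the same tolerance (subprocess stands in for Go's exec): a missing parser binary is skipped quietly, while any other failure stays fatal.

```python
import subprocess

def load_docker_profile():
    # A missing apparmor_parser binary means there is nothing to load;
    # any other failure is surfaced loudly.
    try:
        subprocess.run(['/sbin/apparmor_parser', '-r', '-W', 'docker'],
                       capture_output=True, check=True)
    except FileNotFoundError:
        return  # parser not installed: skip, as the commit does
    except subprocess.CalledProcessError as err:
        raise RuntimeError('Error loading docker profile: %s' % err.stdout)
```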
containers_storage
train
e611476cb7993024ef879ba3af697fa5e387ea9e
diff --git a/starlette/staticfiles.py b/starlette/staticfiles.py index <HASH>..<HASH> 100644 --- a/starlette/staticfiles.py +++ b/starlette/staticfiles.py @@ -107,12 +107,6 @@ class StaticFiles: if scope["method"] not in ("GET", "HEAD"): return PlainTextResponse("Method Not Allowed", status_code=405) - if path.startswith(".."): - # Most clients will normalize the path, so we shouldn't normally - # get this, but don't allow misbehaving clients to break out of - # the static files directory. - return PlainTextResponse("Not Found", status_code=404) - full_path, stat_result = await self.lookup_path(path) if stat_result and stat.S_ISREG(stat_result.st_mode): @@ -147,6 +141,12 @@ class StaticFiles: ) -> typing.Tuple[str, typing.Optional[os.stat_result]]: for directory in self.all_directories: full_path = os.path.join(directory, path) + if ( + os.path.commonprefix([os.path.realpath(full_path), directory]) + != directory + ): + # Don't allow misbehaving clients to break out of the static files directory. + continue try: stat_result = await aio_stat(full_path) return (full_path, stat_result)
More robust path-traversal check in StaticFiles app (#<I>) * More robust path-traversal check in StaticFiles app * Linting
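The traversal check in minimal form (POSIX paths assumed): resolve ".." and symlinks first, then require the real path to remain under the trusted directory.

```python
import os

def is_within(directory, requested_path):
    # Resolve symlinks and ".." before comparing, mirroring the
    # realpath + commonprefix check the commit adds.
    real = os.path.realpath(os.path.join(directory, requested_path))
    return os.path.commonprefix([real, directory]) == directory

assert not is_within('/srv/static', '../etc/passwd')
```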
encode_starlette
train
e6e8a75fd9c8541ebbcb7197dc70204f65504aa6
diff --git a/nlppln/utils.py b/nlppln/utils.py index <HASH>..<HASH> 100644 --- a/nlppln/utils.py +++ b/nlppln/utils.py @@ -71,5 +71,17 @@ def copy_cwl_files(from_dir=CWL_PATH): fo = os.path.join(cwl_data_dir, os.path.basename(fi)) shutil.copy2(fi, fo) + +def get_files(directory): + """Return a list of all files in the directory.""" + files_out = [] + for f in os.listdir(directory): + fi = os.path.join(directory, f) + if os.path.isfile(fi): + files_out.append(fi) + + # order alphabetically on file name + return sorted(files_out) + if __name__ == '__main__': copy_cwl_files()
Add utility function to return a list of all files in a directory
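A usage sketch of the new helper (the directory path is illustrative):

```python
from nlppln.utils import get_files

# Returns an alphabetically sorted list of regular files only;
# subdirectories are skipped.
for path in get_files('/data/texts'):
    print(path)
```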
nlppln_nlppln
train
c83f1f3461ca86f714ffa0469a8b1bbd1aa5efdc
diff --git a/lib/rails/mongoid.rb b/lib/rails/mongoid.rb index <HASH>..<HASH> 100644 --- a/lib/rails/mongoid.rb +++ b/lib/rails/mongoid.rb @@ -9,7 +9,7 @@ module Rails #:nodoc: def load_models(app) app.config.paths['app/models'].each do |path| Dir.glob("#{path}/**/*.rb").sort.each do |file| - require_dependency(file) + require_dependency(File.basename(file, ".rb")) end end end
fix error loading with config.cache_classes = true (rails edge)
mongodb_mongoid
train
6d4def2e9fa52403e054b616f29464e8848b1618
diff --git a/brewpi_service/__init__.py b/brewpi_service/__init__.py index <HASH>..<HASH> 100644 --- a/brewpi_service/__init__.py +++ b/brewpi_service/__init__.py @@ -17,7 +17,7 @@ app.config.update({ plugins=['apispec.ext.marshmallow'] ), 'APISPEC_SWAGGER_URL': '/specs/', - 'APISPEC_SWAGGER_UI_URL': '/docs/', + 'APISPEC_SWAGGER_UI_URL': None, "SECRET_KEY": 'A0Zr98j/3yX R~XHH!jmN]LWX/,?RT', "SQLALCHEMY_DATABASE_URI": 'sqlite:///brewpi-service.db' })
don't include swagger ui
BrewBlox_brewblox-service
train
13fec7735c763863a8dbc6f1c65054c9b4eab8eb
diff --git a/addrconvs.go b/addrconvs.go index <HASH>..<HASH> 100644 --- a/addrconvs.go +++ b/addrconvs.go @@ -40,11 +40,11 @@ const ( // TestNetKey is the key identifier for TestNet TestNetKey = 0xef - // MainNetScriptHash is the address identifier for MainNet + // MainNetScriptHash is the script hash identifier for MainNet MainNetScriptHash = 0x05 - // TestNetScriptHash is the address identifier for TestNet - TestNetScriptHash = 0xC4 + // TestNetScriptHash is the script hash identifier for TestNet + TestNetScriptHash = 0xc4 ) // EncodeAddress takes a 20-byte raw payment address (hash160 of a pubkey) @@ -67,7 +67,7 @@ func EncodeAddress(addrHash []byte, net btcwire.BitcoinNet) (encoded string, err return encodeHashWithNetId(netID, addrHash) } -// EncodeScriptHash takes a 20-byte raw script hash (hash160 of a pubkey) +// EncodeScriptHash takes a 20-byte raw script hash (hash160 of the SHA256 of the redeeming script) // and the Bitcoin network to create a human-readable payment address string. func EncodeScriptHash(addrHash []byte, net btcwire.BitcoinNet) (encoded string, err error) { if len(addrHash) != ripemd160.Size { diff --git a/addrconvs_test.go b/addrconvs_test.go index <HASH>..<HASH> 100644 --- a/addrconvs_test.go +++ b/addrconvs_test.go @@ -137,3 +137,49 @@ func TestEncodeDecodePrivateKey(t *testing.T) { } } + +var encodeScriptHashTests = []struct { + raw []byte + net btcwire.BitcoinNet + res string + err error +}{ + {[]byte{0xf8, 0x15, 0xb0, 0x36, 0xd9, 0xbb, 0xbc, 0xe5, 0xe9, 0xf2, 0xa0, 0x0a, 0xbd, 0x1b, 0xf3, 0xdc, 0x91, 0xe9, 0x55, 0x10}, + btcwire.MainNet, "3QJmV3qfvL9SuYo34YihAf3sRCW3qSinyC", nil}, + + {[]byte{0xe8, 0xc3, 0x00, 0xc8, 0x79, 0x86, 0xef, 0xa8, 0x4c, 0x37, 0xc0, 0x51, 0x99, 0x29, 0x01, 0x9e, 0xf8, 0x6e, 0xb5, 0xb4}, + btcwire.MainNet, "3NukJ6fYZJ5Kk8bPjycAnruZkE5Q7UW7i8", nil}, + + // Raw address not 20 bytes (padded with leading 0s) + {[]byte{0x00, 0xf8, 0x15, 0xb0, 0x36, 0xd9, 0xbb, 0xbc, 0xe5, 0xe9, 0xf2, 0xa0, 0x0a, 0xbd, 0x1b, 0xf3, 0xdc, 0x91, 0xe9, 0x55, 0x10}, + btcwire.MainNet, "3QJmV3qfvL9SuYo34YihAf3sRCW3qSinyC", btcutil.ErrMalformedAddress}, + + {[]byte{0x78, 0xb3, 0x16, 0xa0, 0x86, 0x47, 0xd5, 0xb7, 0x72, 0x83, 0xe5, 0x12, 0xd3, 0x60, 0x3f, 0x1f, 0x1c, 0x8d, 0xe6, 0x8f}, + btcwire.TestNet, "", btcutil.ErrUnknownNet}, + + // from bitcoind base58_keys_valid + {[]byte{0xc5, 0x79, 0x34, 0x2c, 0x2c, 0x4c, 0x92, 0x20, 0x20, 0x5e, 0x2c, 0xdc, 0x28, 0x56, 0x17, 0x04, 0x0c, 0x92, 0x4a, 0x0a}, + btcwire.TestNet3, "2NBFNJTktNa7GZusGbDbGKRZTxdK9VVez3n", nil}, + + // from bitcoind base58_keys_valid + {[]byte{0x63, 0xbc, 0xc5, 0x65, 0xf9, 0xe6, 0x8e, 0xe0, 0x18, 0x9d, 0xd5, 0xcc, 0x67, 0xf1, 0xb0, 0xe5, 0xf0, 0x2f, 0x45, 0xcb}, + btcwire.MainNet, "3AnNxabYGoTxYiTEZwFEnerUoeFXK2Zoks", nil}, + + // Bad network + {make([]byte, 20), 0, "", btcutil.ErrUnknownNet}, +} + +func TestEncodeScriptHashes(t *testing.T) { + for i := range encodeScriptHashTests { + res, err := btcutil.EncodeScriptHash(encodeScriptHashTests[i].raw, + encodeScriptHashTests[i].net) + if err != encodeScriptHashTests[i].err { + t.Errorf("Error Results differ: Expected '%v', returned '%v'", encodeScriptHashTests[i].err, err) + continue + } + if err == nil && res != encodeScriptHashTests[i].res { + t.Errorf("Results differ: Expected '%s', returned '%s'", + encodeScriptHashTests[i].res, res) + } + } +}
Fixed comments and added tests for scriptHash encoding. Fixed bad comments on new scriptHash constants and encoding function. Added encodeScriptHashTests to addrconvs_test.go to check correctness of output.
btcsuite_btcutil
train
9a62d2df6253dc4ad20bf7ab50ba763d50b0d441
diff --git a/src/main/java/org/cyclopsgroup/jcli/impl/ParsingContextBuilder.java b/src/main/java/org/cyclopsgroup/jcli/impl/ParsingContextBuilder.java index <HASH>..<HASH> 100644 --- a/src/main/java/org/cyclopsgroup/jcli/impl/ParsingContextBuilder.java +++ b/src/main/java/org/cyclopsgroup/jcli/impl/ParsingContextBuilder.java @@ -78,10 +78,10 @@ class ParsingContextBuilder<T> { } private static <T> ImmutableList<ValueReference<T>> referenceOfFields(Class<T> beanType) { - return ImmutableList - .copyOf(FluentIterable.from(beanType.getFields()).append(beanType.getDeclaredFields()) - .toList().stream().map(f -> ValueReference.<T>instanceOf(f)) - .collect(Collectors.toMap(f -> f.getName(), f -> f)).values()); + Map<String, ValueReference<T>> refMap = new HashMap<>(); + FluentIterable.from(beanType.getFields()).append(beanType.getDeclaredFields()).toList().stream() + .map(f -> ValueReference.<T>instanceOf(f)).forEach(f -> refMap.put(f.getName(), f)); + return ImmutableList.copyOf(refMap.values()); } private final Class<T> beanType;
Avoid failing because of overriding fields
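The dedup idea in a minimal Python sketch (field objects with a name attribute are assumed): keying by name lets later declarations overwrite inherited fields instead of raising on the duplicate, which is what the Java change achieves by replacing Collectors.toMap with a plain HashMap.

```python
def dedupe_by_name(fields):
    # Later entries overwrite earlier ones with the same name instead
    # of raising on a duplicate key.
    by_name = {}
    for field in fields:
        by_name[field.name] = field
    return list(by_name.values())
```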
jiaqi_jcli
train
1ab1696070e411db521607851b5759118ef5f285
diff --git a/src/kba/pipeline/_truncate.py b/src/kba/pipeline/_truncate.py index <HASH>..<HASH> 100644 --- a/src/kba/pipeline/_truncate.py +++ b/src/kba/pipeline/_truncate.py @@ -1,19 +1,40 @@ +''' +pipeline stage for truncating chunks at a fixed length and deleting +the overage -def truncate(config): +This software is released under an MIT/X11 open source license. + +Copyright 2012 Diffeo, Inc. +''' + +import os +from streamcorpus import Chunk + +class truncate(object): ''' - returns a kba.pipeline "transform" function that populates - body.media_type if it is empty and the content type is easily - guessed. + kba.pipeline "transform" callable of the "batch" type that takes a + chunk and replaces with one that has up to max_items in it. ''' - ## make a closure around config - global count - count = 0 - def _truncate(stream_item): - global count - count += 1 - if count < config['max_items']: - return stream_item - else: - return None - - return _truncate + def __init__(self, config): + self.config = config + + def __call__(self, chunk_path): + ''' + batch-type transform stage: reads a chunk from chunk_path, and + replaces it with a new chunk at the same path + ''' + ## make a new output chunk at a temporary path + tmp_chunk_path = chunk_path + '_' + t_chunk = Chunk(path=tmp_chunk_path) + + for num, si in enumerate(Chunk(path=chunk_path)): + if num < config['max_items']: + t_chunk.add(si) + else: + break + + ## flush to disk + t_chunk.close() + + ## atomic rename new chunk file into place + os.rename(tmp_chunk_path, chunk_path)
converting truncate to a batch transform
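The batch-transform pattern in a minimal sketch (read_items and write_items are hypothetical stand-ins for streamcorpus Chunk I/O): write the truncated output to a temporary path, then atomically rename it over the original.

```python
import os

def truncate_chunk(chunk_path, max_items, read_items, write_items):
    # Write the truncated chunk to a temp path, then rename it over the
    # original (atomic on POSIX when both live on the same filesystem).
    tmp_path = chunk_path + '_'
    kept = []
    for num, item in enumerate(read_items(chunk_path)):
        if num >= max_items:
            break
        kept.append(item)
    write_items(tmp_path, kept)
    os.rename(tmp_path, chunk_path)
```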
trec-kba_streamcorpus-pipeline
train
2ce03a923e651584d6d72b09830f9e402e3c2bd1
diff --git a/provider/common/bootstrap.go b/provider/common/bootstrap.go index <HASH>..<HASH> 100644 --- a/provider/common/bootstrap.go +++ b/provider/common/bootstrap.go @@ -116,9 +116,13 @@ func BootstrapInstance(ctx environs.BootstrapContext, env environs.Environ, args finalize := func(ctx environs.BootstrapContext, icfg *instancecfg.InstanceConfig) error { icfg.InstanceId = result.Instance.Id() icfg.HardwareCharacteristics = result.Hardware - envConfig := result.Config - if envConfig == nil { - envConfig = env.Config() + envConfig := env.Config() + if result.Config != nil { + updated, err := envConfig.Apply(result.Config.UnknownAttrs()) + if err != nil { + return errors.Trace(err) + } + envConfig = updated } if err := instancecfg.FinishInstanceConfig(icfg, envConfig); err != nil { return err
Only propagate the "unknown" attrs from result.Config.
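A dict-based sketch of the merge the commit performs (the Go config types are reduced to plain mappings): start from the environment's own config and overlay only the provider-specific "unknown" attributes, rather than replacing the whole config.

```python
def merged_env_config(env_config, result_unknown_attrs):
    # Overlay only the provider-specific attributes from the bootstrap
    # result onto the environment's existing configuration.
    merged = dict(env_config)
    merged.update(result_unknown_attrs or {})
    return merged
```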
juju_juju
train
14f687b7355df142ea76145dea53bcbe7d012fdc
diff --git a/oauthlib/common.py b/oauthlib/common.py index <HASH>..<HASH> 100644 --- a/oauthlib/common.py +++ b/oauthlib/common.py @@ -9,10 +9,8 @@ This module provides data structures and utilities common to all implementations of OAuth. """ -import Crypto.PublicKey.RSA as RSA import collections import datetime -import jwt import logging import random import re @@ -238,6 +236,9 @@ def generate_token(length=30, chars=UNICODE_ASCII_CHARACTER_SET): def generate_crypto_token(private_pem, request): + import Crypto.PublicKey.RSA as RSA + import jwt + private_key = RSA.importKey(private_pem) now = datetime.datetime.utcnow() @@ -256,6 +257,9 @@ def generate_crypto_token(private_pem, request): def verify_crypto_token(private_pem, token): + import Crypto.PublicKey.RSA as RSA + import jwt + public_key = RSA.importKey(private_pem).publickey() try: diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100755 --- a/setup.py +++ b/setup.py @@ -21,8 +21,10 @@ if sys.version_info[0] == 3: tests_require = ['nose', 'pycrypto', 'pyjwt'] else: tests_require = ['nose', 'unittest2', 'pycrypto', 'mock', 'pyjwt'] +rsa_require = ['pycrypto'] +cryptotoken_require = ['pycrypto', 'pyjwt'] -requires = ['pycrypto', 'pyjwt'] +requires = [] setup( name='oauthlib', @@ -39,7 +41,7 @@ setup( packages=find_packages(exclude=('docs', 'tests', 'tests.*')), test_suite='nose.collector', tests_require=tests_require, - extras_require={'test': tests_require}, + extras_require={'test': tests_require, 'rsa': rsa_require, 'cryptotoken': cryptotoken_require}, install_requires=requires, classifiers=[ 'Development Status :: 4 - Beta',
Move setup.py requirements * Moved requires for cryptotoken changes so that you don't have to install the dependencies if you aren't going to use cryptotokens
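The lazy-import pattern in minimal form; the extras names come from this setup.py change, while the error message wording is illustrative.

```python
def generate_crypto_token(private_pem, request):
    # Heavy optional dependencies are imported lazily, so a plain
    # `pip install oauthlib` works; the extras pull them in when needed.
    try:
        import jwt                            # from the 'cryptotoken' extra
        import Crypto.PublicKey.RSA as RSA    # from the 'rsa' extra
    except ImportError as exc:
        raise ImportError(
            'install oauthlib[cryptotoken] to use crypto tokens') from exc
    ...  # token generation proceeds as before
```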
oauthlib_oauthlib
train
b42c4128f4105791f6fb697e3aa48d45a46b8846
diff --git a/can/interfaces/remote/server.py b/can/interfaces/remote/server.py index <HASH>..<HASH> 100644 --- a/can/interfaces/remote/server.py +++ b/can/interfaces/remote/server.py @@ -58,18 +58,10 @@ class ClientBusConnection(socketserver.BaseRequestHandler): self.server.clients.append(self) def handle(self): - event = self._next_event() - if isinstance(event, events.BusRequest): - self._start_bus(event) - elif isinstance(event, events.PeriodicMessageStart): - self._start_periodic_transmit(event) - else: + bus_event = self._next_event() + if not isinstance(bus_event, events.BusRequest): raise RemoteServerError('Handshake error') - def _start_bus(self, bus_event): - config = dict(self.server.config) - self.config = config - if bus_event.version != can.interfaces.remote.PROTOCOL_VERSION: raise RemoteServerError('Protocol version mismatch (%d != %d)' % ( bus_event.version, can.interfaces.remote.PROTOCOL_VERSION)) @@ -94,17 +86,16 @@ class ClientBusConnection(socketserver.BaseRequestHandler): finally: self.request.sendall(self.conn.next_data()) - self.send_thread = threading.Thread(target=self._send_to_client, - name='Send to client') - self.send_thread.daemon = True self.send_thread.start() self._receive_from_client() - def _start_periodic_transmit(self, start_event): - #: Cyclic send task - self.task = can.interface.CyclicSendTask(self.server.config['channel'], - start_event.msg, - start_event.period) + def finish(self): + logger.info("Closing connection to %s", self.request.getpeername()) + # Remove itself from the server's list of clients + self.server.clients.remove(self) + self.stop_event.set() + if self.send_thread.is_alive(): + self.send_thread.join(3) def _next_event(self): """Block until a new event has been received. @@ -151,14 +142,6 @@ class ClientBusConnection(socketserver.BaseRequestHandler): elif isinstance(event, events.PeriodicMessageStop): self.send_tasks[event.arbitration_id].stop() - logger.info('Closing connection to %s', self.request.getpeername()) - # Remove itself from the server's list of clients - self.server.clients.remove(self) - self.stop_event.set() - self.send_thread.join(1.0) - self.request.shutdown(socket.SHUT_WR) - self.request.close() - def _send_to_client(self): """Continuously read CAN messages and send to client.""" while not self.stop_event.is_set():
Some more refactoring. Was reverted on last merge.
hardbyte_python-can
train
2f3c03a89652684863e996a9ab2181bee80a8c8e
diff --git a/polyaxon/scheduler/spawners/tensorboard_spawner.py b/polyaxon/scheduler/spawners/tensorboard_spawner.py index <HASH>..<HASH> 100644 --- a/polyaxon/scheduler/spawners/tensorboard_spawner.py +++ b/polyaxon/scheduler/spawners/tensorboard_spawner.py @@ -76,10 +76,6 @@ class TensorboardSpawner(ProjectJobSpawner): return volumes, volume_mounts - @staticmethod - def fii(): - import json; data = json.loads(open('k.json').read()); content = []; for k in data: content.append('export {}={}'.format(k, data[k])); output = open('somefile.txt', 'w'); output.write('\n'.join(content)); output.close() - @classmethod def get_stores_secrets_command_args(cls, stores_secrets): """Create an auth command for S3 and GCS."""
Fix tensorboard spawner
polyaxon_polyaxon
train
a1f66cba3305efb7a0bb40c55bed1e18b9493401
diff --git a/src/broker/__tests__/connect.spec.js b/src/broker/__tests__/connect.spec.js index <HASH>..<HASH> 100644 --- a/src/broker/__tests__/connect.spec.js +++ b/src/broker/__tests__/connect.spec.js @@ -8,6 +8,7 @@ const { testIfKafka_1_1_0, } = require('testHelpers') +const Long = require('long') const Broker = require('../index') describe('Broker > connect', () => { @@ -125,6 +126,40 @@ describe('Broker > connect', () => { await broker.connect() expect(broker.isConnected()).toEqual(true) }) + + test('returns false when the session lifetime has expired', async () => { + const sessionLifetime = 15000 + const reauthenticationThreshold = 10000 + broker = new Broker({ + connection: createConnection(saslConnectionOpts()), + logger: newLogger(), + reauthenticationThreshold, + }) + + await broker.connect() + expect(broker.isConnected()).toEqual(true) + + broker.sessionLifetime = Long.fromValue(sessionLifetime) + const [seconds] = broker.authenticatedAt + broker.authenticatedAt = [seconds - sessionLifetime / 1000, 0] + + expect(broker.isConnected()).toEqual(false) + }) + }) + + test('returns true when the session lifetime is 0', async () => { + broker = new Broker({ + connection: createConnection(saslConnectionOpts()), + logger: newLogger(), + }) + + await broker.connect() + expect(broker.isConnected()).toEqual(true) + + broker.sessionLifetime = Long.ZERO + broker.authenticatedAt = [0, 0] + + expect(broker.isConnected()).toEqual(true) }) describe('when SaslAuthenticate protocol is available', () => { diff --git a/src/broker/index.js b/src/broker/index.js index <HASH>..<HASH> 100644 --- a/src/broker/index.js +++ b/src/broker/index.js @@ -109,7 +109,7 @@ module.exports = class Broker { }) } - if (!this.authenticated && this.connection.sasl) { + if (this.authenticatedAt == null && this.connection.sasl) { const authenticator = new SASLAuthenticator( this.connection, this.rootLogger,
Test broker.isConnected taking session lifetime into account
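The expiry rule in a minimal Python sketch (times in milliseconds; a monotonic clock stands in for the broker's timestamps): a lifetime of 0 means the session never expires, otherwise elapsed time is compared against the lifetime.

```python
import time

def session_still_valid(authenticated_at, session_lifetime_ms):
    # Lifetime 0 means "never expires"; otherwise the session is only
    # valid while the elapsed time stays under the lifetime.
    if session_lifetime_ms == 0:
        return True
    elapsed_ms = (time.monotonic() - authenticated_at) * 1000.0
    return elapsed_ms < session_lifetime_ms
```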
tulios_kafkajs
train
8f24051e776b6cbe013cfc7d501fac699d938f88
diff --git a/http-netty/src/main/java/io/micronaut/http/netty/NettyMutableHttpResponse.java b/http-netty/src/main/java/io/micronaut/http/netty/NettyMutableHttpResponse.java index <HASH>..<HASH> 100644 --- a/http-netty/src/main/java/io/micronaut/http/netty/NettyMutableHttpResponse.java +++ b/http-netty/src/main/java/io/micronaut/http/netty/NettyMutableHttpResponse.java @@ -57,7 +57,7 @@ public class NettyMutableHttpResponse<B> implements MutableHttpResponse<B> { final NettyHttpHeaders headers; private final ConversionService conversionService; private B body; - private final Map<Class, Optional> convertedBodies = new LinkedHashMap<>(1); + private final Map<Class, Optional> convertedBodies = Collections.synchronizedMap(new LinkedHashMap<>(1)); private final MutableConvertibleValues<Object> attributes; private ServerCookieEncoder serverCookieEncoder = DEFAULT_SERVER_COOKIE_ENCODER;
Avoid sync issue in getBody (#<I>) * Avoid sync issue in getBody: avoid ConcurrentModificationException in getBody by using ConcurrentLinkedHashMap instead of LinkedHashMap. * Updated to use synchronizedMap
micronaut-projects_micronaut-core
train
11364f96ce0c8ad8e288add4fa3342692356da3c
diff --git a/http/router.go b/http/router.go index <HASH>..<HASH> 100644 --- a/http/router.go +++ b/http/router.go @@ -76,7 +76,7 @@ func (h baseHandler) panic(w http.ResponseWriter, r *http.Request, rcv interface pe := &platform.Error{ Code: platform.EInternal, Msg: "a panic has occurred", - Err: fmt.Errorf("%v", rcv), + Err: fmt.Errorf("%s: %v", r.URL.String(), rcv), } l := getPanicLogger() diff --git a/http/router_test.go b/http/router_test.go index <HASH>..<HASH> 100644 --- a/http/router_test.go +++ b/http/router_test.go @@ -174,7 +174,7 @@ func TestRouter_Panic(t *testing.T) { body: ` { "code": "internal error", - "message": "a panic has occurred: not implemented" + "message": "a panic has occurred: /ping: not implemented" }`, }, },
chore(http): add route path to panic log message
influxdata_influxdb
train
149bed73cc65a001954dfc120d7ddc8a601c71bd
diff --git a/prompt.py b/prompt.py index <HASH>..<HASH> 100644 --- a/prompt.py +++ b/prompt.py @@ -107,7 +107,7 @@ class PromptInterface(object): 'withdraw holds # lists all current holds', 'withdraw completed # lists completed holds eligible for cleanup', 'withdraw cancel # cancels current holds', - 'witdraw cleanup # cleans up completed holds', + 'withdraw cleanup # cleans up completed holds', 'withdraw # withdraws the first hold availabe', 'withdraw all # withdraw all holds available', 'send {assetId or name} {address} {amount} (--from-addr={addr})',
fix typo in prompt.py (witdraw -> withdraw) (#<I>)
CityOfZion_neo-python
train
55320753c7e069fe8d06fc5b5089f576139a6975
diff --git a/lib/google/apis/core/http_command.rb b/lib/google/apis/core/http_command.rb index <HASH>..<HASH> 100644 --- a/lib/google/apis/core/http_command.rb +++ b/lib/google/apis/core/http_command.rb @@ -253,7 +253,7 @@ module Google # @return [Object] result if no block given # @yield [result, nil] if block given def success(result, &block) - logger.debug { sprintf('Success - %s', PP.pp(result, '')) } + logger.debug { sprintf('Success - %s', safe_object_representation(result)) } block.call(result, nil) if block_given? result end @@ -333,6 +333,19 @@ module Google private + UNSAFE_CLASS_NAMES = [ + "Google::Apis::CloudkmsV1::DecryptResponse" + ] + + def safe_object_representation obj + name = obj.class.name + if UNSAFE_CLASS_NAMES.include? name + "#<#{name} (fields redacted)>" + else + PP.pp(obj, "") + end + end + def opencensus_begin_span return unless OPENCENSUS_AVAILABLE && options.use_opencensus return if @opencensus_span diff --git a/spec/google/apis/core/http_command_spec.rb b/spec/google/apis/core/http_command_spec.rb index <HASH>..<HASH> 100644 --- a/spec/google/apis/core/http_command_spec.rb +++ b/spec/google/apis/core/http_command_spec.rb @@ -15,6 +15,15 @@ require 'spec_helper' require 'google/apis/core/http_command' +module Google + module Apis + module CloudkmsV1 + class DecryptResponse + end + end + end +end + RSpec.describe Google::Apis::Core::HttpCommand do include TestHelpers include_context 'HTTP client' @@ -429,4 +438,23 @@ RSpec.describe Google::Apis::Core::HttpCommand do command.execute(client) end + describe "#safe_object_representation" do + let(:command) do + Google::Apis::Core::HttpCommand.new(:get, 'https://www.googleapis.com/zoo/animals') + end + + it "should show fields in a normal object" do + obj = Object.new + obj.instance_variable_set(:@foobar, "hi") + str = command.send(:safe_object_representation, obj) + expect(str).to match /@foobar/ + end + + it "should not show fields in a restricted object" do + obj = Google::Apis::CloudkmsV1::DecryptResponse.new + obj.instance_variable_set(:@foobar, "hi") + str = command.send(:safe_object_representation, obj) + expect(str).not_to match /@foobar/ + end + end end
Change logged representation of certain classes (#<I>)
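The redaction idea in a minimal Python sketch (the denylist entry is illustrative): never pretty-print objects of classes known to carry secrets; log a placeholder instead.

```python
UNSAFE_CLASS_NAMES = {'DecryptResponse'}  # illustrative denylist

def safe_object_representation(obj):
    # Objects on the sensitive list get a redacted placeholder so
    # decrypted plaintext never reaches debug logs.
    name = type(obj).__name__
    if name in UNSAFE_CLASS_NAMES:
        return '#<%s (fields redacted)>' % name
    return repr(obj)
```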
googleapis_google-api-ruby-client
train
962a5373060fdb87f36b7ba653bf0f5ebe467f51
diff --git a/public/bundle.js b/public/bundle.js index <HASH>..<HASH> 100644 --- a/public/bundle.js +++ b/public/bundle.js @@ -13379,6 +13379,8 @@ var VEvents = function () { this.vComponent.actionsStarted(this); } + new __WEBPACK_IMPORTED_MODULE_4__events_errors__["a" /* VErrors */](root).clearErrors(); + pseries(fnlist).then(function (results) { var result = results.pop(); var contentType = result.contentType; diff --git a/public/wc.js b/public/wc.js index <HASH>..<HASH> 100644 --- a/public/wc.js +++ b/public/wc.js @@ -9573,6 +9573,8 @@ var VEvents = function () { this.vComponent.actionsStarted(this); } + new __WEBPACK_IMPORTED_MODULE_4__events_errors__["a" /* VErrors */](root).clearErrors(); + pseries(fnlist).then(function (results) { var result = results.pop(); var contentType = result.contentType; diff --git a/views/mdc/assets/js/components/events.js b/views/mdc/assets/js/components/events.js index <HASH>..<HASH> 100644 --- a/views/mdc/assets/js/components/events.js +++ b/views/mdc/assets/js/components/events.js @@ -49,6 +49,8 @@ export class VEvents { this.vComponent.actionsStarted(this); } + new VErrors(root).clearErrors(); + pseries(fnlist).then((results) => { const result = results.pop(); const contentType = result.contentType;
Clear errors before action chain. An action from a plugin is resulting in an error that never gets cleared. Plugins only provide a function, so they don't have access to the clearErrors() function, and it makes sense to clear errors when a new chain begins, as it will result in new errors or a clean run.
rx_presenters
train
843cfc4cdc9fe9057a24c46ae14ca95eb6b04e7e
diff --git a/confit.py b/confit.py index <HASH>..<HASH> 100644 --- a/confit.py +++ b/confit.py @@ -1073,11 +1073,16 @@ class Filename(Template): Filenames are returned as absolute, tilde-free paths. - Relative paths are relative to the configuration directory (see + Relative paths are relative to the template's `cwd` argument + when it is specified, then the configuration directory (see the `config_dir` method) if they come from a file. Otherwise, they are relative to the current working directory. This helps attain the expected behavior when using command-line options. """ + def __init__(self, default=REQUIRED, cwd=None): + super(Filename, self).__init__(default) + self.cwd = cwd + def value(self, view): path, source = view.first() if not isinstance(path, BASESTRING): @@ -1088,9 +1093,13 @@ class Filename(Template): ) path = os.path.expanduser(STRING(path)) - if not os.path.isabs(path) and source.filename: - # From defaults: relative to the app's directory. - path = os.path.join(view.root().config_dir(), path) + if not os.path.isabs(path): + if self.cwd is not None: + # relative to the template's argument + path = os.path.join(self.cwd, path) + elif source.filename: + # From defaults: relative to the app's directory. + path = os.path.join(view.root().config_dir(), path) return os.path.abspath(path) diff --git a/test/test_valid.py b/test/test_valid.py index <HASH>..<HASH> 100644 --- a/test/test_valid.py +++ b/test/test_valid.py @@ -264,6 +264,11 @@ class StrSeqTest(unittest.TestCase): class FilenameTest(unittest.TestCase): + def test_filename_relative_to_working_dir(self): + config = _root({'foo': 'foo/bar'}) + valid = config['foo'].get(confit.Filename(cwd='/dev/null')) + self.assertEqual(valid, os.path.realpath('/dev/null/foo/bar')) + def test_filename_with_non_file_source(self): config = _root({'foo': 'foo/bar'}) valid = config['foo'].get(confit.Filename())
Add cwd argument to Filename template. Allows specifying a path that overrides what the filename is relative to. I use this to make Filenames relative to another configuration field.
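The resolution precedence in a minimal Python sketch (config_dir stands in for the view's configuration directory): the template's cwd argument wins, then the config directory, else the process working directory.

```python
import os

def resolve_filename(path, cwd=None, config_dir=None):
    # Precedence added by the commit: explicit cwd first, then the
    # configuration directory, else the current working directory.
    path = os.path.expanduser(path)
    if not os.path.isabs(path):
        base = cwd or config_dir or os.getcwd()
        path = os.path.join(base, path)
    return os.path.abspath(path)
```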
sampsyo_confuse
train
fe5ed4e1d07faeb757bc0665f8c126be93451a69
diff --git a/helpers/DateTimeHelper.php b/helpers/DateTimeHelper.php index <HASH>..<HASH> 100644 --- a/helpers/DateTimeHelper.php +++ b/helpers/DateTimeHelper.php @@ -13,8 +13,8 @@ use mpf\base\Helper; class DateTimeHelper extends Helper { - public $days = array('', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday'); - public $months = array('', 'January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December'); + + public $timeFormat = ' H:i'; public function niceDate($date = null, $noInterval = false, $showDayOfWeek = true, $noValueText = '-') { @@ -24,13 +24,14 @@ class DateTimeHelper extends Helper $date = time(); if (is_string($date)) $date = strtotime($date); - $day = $this->translate($this->days[(int)date('N', $date)]); - $month = $this->translate($this->months[(int)date('m', $date)]); + $day = $this->translate(date('l', $date)); + $month = $this->translate(date('F', $date)); if ($showDayOfWeek) { $niceDate = $day . ', ' . date('d', $date) . ' ' . $month . ' ' . date('Y', $date); } else { $niceDate = date('d', $date) . ' ' . $month . ' ' . date('Y', $date); } + $niceDate .= date($this->timeFormat, $date); if ((date('Y-m-d') == date('Y-m-d', $date)) && (!$noInterval)) { if ((date('H') != date('H', $date)) && ((date('H') != (date('H', $date) + 1)) || (date('i') > date('i', $date)))) { $niceDate = date('H') - date('H', $date); @@ -39,7 +40,7 @@ class DateTimeHelper extends Helper } elseif (-1 === $niceDate) { $niceDate = $this->translate('One hour from now'); } elseif ($niceDate < 0) { - $niceDate = str_replace('{x}', $niceDate*-1, $this->translate('{x} hours from now')); + $niceDate = str_replace('{x}', $niceDate * -1, $this->translate('{x} hours from now')); } else { $niceDate = str_replace('{x}', $niceDate, $this->translate('{x} hours ago')); } @@ -60,7 +61,7 @@ class DateTimeHelper extends Helper } elseif (-1 === $niceDate) { $niceDate = $this->translate('One minute from now'); } elseif ($niceDate < 0) { - $niceDate = str_replace('{x}', $niceDate*-1, $this->translate('{x} minute from now')); + $niceDate = str_replace('{x}', $niceDate * -1, $this->translate('{x} minute from now')); } else { $niceDate = str_replace('{x}', $niceDate, $this->translate('{x} minutes ago')); }
Use date() instead of hard-coded arrays for day and month names; also append a configurable $timeFormat to niceDate output. (A usage sketch follows this entry.)
mpf-soft_mpf
train
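A minimal PHP sketch of calling the patched helper. Direct instantiation and the `mpf\helpers` namespace are assumptions — the framework may construct helpers differently; only `niceDate()` and the new `$timeFormat` property come from the commit.

<?php
use mpf\helpers\DateTimeHelper;

$helper = new DateTimeHelper(); // hypothetical standalone construction

// The new $timeFormat property is appended to every formatted date;
// set it to '' to recover the old date-only output.
$helper->timeFormat = ' H:i';

// date('l') and date('F') now supply the day and month names that the
// removed $days / $months arrays used to hold.
echo $helper->niceDate('2015-03-06 16:45:00', true), "\n";
// e.g. "Friday, 06 March 2015 16:45" ($noInterval = true skips the
// "x hours ago" relative wording)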
71872eb113d958d1e6d5c6deb46a414d92606fc7
diff --git a/core/src/main/java/com/opentable/logging/JsonLogEncoder.java b/core/src/main/java/com/opentable/logging/JsonLogEncoder.java index <HASH>..<HASH> 100644 --- a/core/src/main/java/com/opentable/logging/JsonLogEncoder.java +++ b/core/src/main/java/com/opentable/logging/JsonLogEncoder.java @@ -67,6 +67,11 @@ public class JsonLogEncoder extends EncoderBase<ILoggingEvent> { if (marker instanceof LogMetadata) { ObjectNode metadataNode = mapper.valueToTree(((LogMetadata) marker).getMetadata()); logLine.setAll(metadataNode); + + for (Object o : ((LogMetadata) marker).getInlines()) { + metadataNode = mapper.valueToTree(o); + logLine.setAll(metadataNode); + } } for (Entry<String, String> e : event.getMDCPropertyMap().entrySet()) { diff --git a/core/src/main/java/com/opentable/logging/LogMetadata.java b/core/src/main/java/com/opentable/logging/LogMetadata.java index <HASH>..<HASH> 100644 --- a/core/src/main/java/com/opentable/logging/LogMetadata.java +++ b/core/src/main/java/com/opentable/logging/LogMetadata.java @@ -13,9 +13,11 @@ */ package com.opentable.logging; +import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; +import java.util.List; import java.util.Map; import org.slf4j.Marker; @@ -27,6 +29,7 @@ import org.slf4j.Marker; public class LogMetadata implements Marker { private static final long serialVersionUID = 1L; private final Map<String, Object> metadata; + private final List<Object> inlines = new ArrayList<>(); private LogMetadata(Map<String, Object> metadata) { this.metadata = metadata; @@ -49,6 +52,22 @@ public class LogMetadata implements Marker { return this; } + /** + * Extend a metadata marker with an arbitrary object's JSON serialized fields. + */ + public LogMetadata andInline(Object embeddedObj) { + inlines.add(embeddedObj); + return this; + } + + public Map<String, Object> getMetadata() { + return metadata; + } + + public List<Object> getInlines() { + return inlines; + } + @Override public String getName() { return "log-marker"; @@ -88,8 +107,4 @@ public class LogMetadata implements Marker { public boolean contains(String name) { return false; } - - public Map<String, Object> getMetadata() { - return metadata; - } } diff --git a/core/src/test/java/com/opentable/logging/LogMetadataTest.java b/core/src/test/java/com/opentable/logging/LogMetadataTest.java index <HASH>..<HASH> 100644 --- a/core/src/test/java/com/opentable/logging/LogMetadataTest.java +++ b/core/src/test/java/com/opentable/logging/LogMetadataTest.java @@ -86,7 +86,7 @@ public class LogMetadataTest { } @Test - public void testLogger() throws Exception + public void testSimpleMetadata() throws Exception { context.getLogger("test").info(LogMetadata.of("metadataTest", "Win!"), "Test {}!", "message"); context.getLogger("test").warn(LogMetadata.of("foo", "bar").and("bar", "baz"), "again", new Throwable()); @@ -95,4 +95,16 @@ public class LogMetadataTest { assertEquals("bar", serializedEvents.get(1).get("foo").textValue()); assertEquals("baz", serializedEvents.get(1).get("bar").textValue()); } + + @Test + public void testObjectMetadata() throws Exception + { + final Object embeddedObj = new Object() { + public String getC() { return "d"; } + }; + context.getLogger("test").info(LogMetadata.of("a", "b").andInline(embeddedObj), ""); + assertEquals(1, serializedEvents.size()); + assertEquals("b", serializedEvents.get(0).get("a").textValue()); + assertEquals("d", serializedEvents.get(0).get("c").textValue()); + } }
Add support for inlining an arbitrary object's JSON-serialized fields into log metadata via LogMetadata.andInline(). (A usage sketch follows this entry.)
opentable_otj-logging
train
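A minimal Java sketch of the new `andInline` call from application code. The service class, the `Order` bean, and its field values are illustrative; `LogMetadata.of(...)` comes from the existing API (see the tests in the diff), and `andInline(...)` is what this commit adds.

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.opentable.logging.LogMetadata;

public class OrderService {
    private static final Logger LOG = LoggerFactory.getLogger(OrderService.class);

    // Hypothetical bean: any Jackson-serializable object works, since the
    // encoder runs each inline through mapper.valueToTree().
    static class Order {
        public String getOrderId() { return "o-123"; }
        public int getGuests() { return 4; }
    }

    void confirm(Order order) {
        // "event" is plain metadata; orderId and guests from the bean are
        // inlined as top-level fields of the JSON log line, not nested.
        LOG.info(LogMetadata.of("event", "order-confirmed").andInline(order),
                "confirmed order {}", order.getOrderId());
    }
}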
8688c70e3b376dd22024392f8b8c28d2deb4e424
diff --git a/xgraphics/image.go b/xgraphics/image.go index <HASH>..<HASH> 100644 --- a/xgraphics/image.go +++ b/xgraphics/image.go @@ -34,6 +34,7 @@ import ( "github.com/BurntSushi/xgb/xproto" "github.com/BurntSushi/xgbutil" + "github.com/BurntSushi/xgbutil/xwindow" ) // Model for the BGRA color type. @@ -197,6 +198,24 @@ func (im *Image) For(each func(x, y int) BGRA) { } } +// ForExp is like For, but bypasses image.Color types. +// (So it should be faster.) +func (im *Image) ForExp(each func(x, y int) (r, g, b, a uint8)) { + var x, y, i int + var r, g, b, a uint8 + for x = im.Rect.Min.X; x < im.Rect.Max.X; x++ { + for y = im.Rect.Min.Y; y < im.Rect.Max.Y; y++ { + i = im.PixOffset(x, y) + r, g, b, a = each(x, y) + + im.Pix[i+0] = b + im.Pix[i+1] = g + im.Pix[i+2] = r + im.Pix[i+3] = a + } + } +} + // SubImage provides a sub image of Image without copying image data. // N.B. The standard library defines a similar function, but returns an // image.Image. Here, we return xgraphics.Image so that we can use the extra @@ -228,6 +247,22 @@ func (im *Image) PixOffset(x, y int) int { return (y-im.Rect.Min.Y)*im.Stride + (x-im.Rect.Min.X)*4 } +// Window is a convenience function for painting the provided +// Image value to a new window, destroying the pixmap created by that image, +// and returning the window value. +// The window is sized to the dimensions of the image. +func (im *Image) Window(parent xproto.Window) *xwindow.Window { + win := xwindow.Must(xwindow.Create(im.X, parent)) + win.Resize(im.Bounds().Dx(), im.Bounds().Dy()) + + im.XSurfaceSet(win.Id) + im.XDraw() + im.XPaint(win.Id) + im.Destroy() + + return win +} + // BGRA is the representation of color for each pixel in an X pixmap. // BUG(burntsushi): This is hard-coded when it shouldn't be. type BGRA struct {
Add experimental support for a faster 'For' (ForExp), bypassing image.Color conversions. Also add a convenience Window method to create an image window from an Image value. (A usage sketch follows this entry.)
BurntSushi_xgbutil
train
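A minimal Go sketch exercising both additions from the diff above. The connection setup, image size, and pixel function are illustrative; `ForExp` and `Window` are the new methods, while `xgbutil.NewConn`, `xgraphics.New`, `RootWin`, `Map`, and `xevent.Main` are assumed to be the usual xgbutil entry points.

package main

import (
	"image"

	"github.com/BurntSushi/xgbutil"
	"github.com/BurntSushi/xgbutil/xevent"
	"github.com/BurntSushi/xgbutil/xgraphics"
)

func main() {
	X, err := xgbutil.NewConn()
	if err != nil {
		panic(err)
	}

	img := xgraphics.New(X, image.Rect(0, 0, 256, 256))

	// ForExp hands back raw b/g/r/a bytes directly, skipping the BGRA
	// color-model conversion that For performs for every pixel.
	img.ForExp(func(x, y int) (r, g, b, a uint8) {
		return uint8(x), uint8(y), 0x80, 0xff
	})

	// Window surfaces, draws, paints, and destroys the pixmap in one
	// call, returning a window already sized to the image.
	win := img.Window(X.RootWin())
	win.Map()

	xevent.Main(X)
}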