Dataset columns:
  hash:    string, length 40 to 40
  diff:    string, length 131 to 114k
  message: string, length 7 to 980
  project: string, length 5 to 67
  split:   string class, 1 value
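Before the rows themselves, a minimal sketch of what a single record looks like may help; the field values are copied from the first row below (with the diff truncated for brevity), the length bounds come from the schema header above, and the choice of Python plus the loader remark at the end are illustrative assumptions, not part of the dump.

```python
# Shape of one record in this dump. Values are copied from the first row;
# only the diff is truncated here for readability.
record = {
    "hash":    "965922c44796ba9d08a274798e67ffd1bedee278",      # 40-character git commit SHA-1
    "diff":    "diff --git a/src/Control/LouverControl.php ...",  # full diffs span 131 chars to ~114k
    "message": "Align with changes in OverviewTable.",            # 7 to 980 chars
    "project": "SetBased_php-abc-form-louver",                    # 5 to 67 chars
    "split":   "train",                                           # single split class in this dump
}

# Sanity checks implied by the schema header (the diff bound is skipped
# because the value above is truncated).
assert len(record["hash"]) == 40
assert 7 <= len(record["message"]) <= 980
assert 5 <= len(record["project"]) <= 67
assert record["split"] == "train"
```

If this dump was exported from a Hugging Face dataset (an assumption; no dataset name is given here), loading it with the `datasets` library would yield rows of exactly this shape.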
965922c44796ba9d08a274798e67ffd1bedee278
diff --git a/src/Control/LouverControl.php b/src/Control/LouverControl.php index <HASH>..<HASH> 100644 --- a/src/Control/LouverControl.php +++ b/src/Control/LouverControl.php @@ -218,10 +218,9 @@ class LouverControl extends ComplexControl { if ($control!==$this->footerControl) { - // Add class for zebra theme. + $control->addClass(OverviewTable::$class); $control->addClass(($i % 2==0) ? 'even' : 'odd'); - // Generate the table row. $ret .= $control->getHtml(); $i++;
Align with changes in OverviewTable.
SetBased_php-abc-form-louver
train
325f31286dd635953a89775691a5fbee187a32f9
diff --git a/daemon/daemon.go b/daemon/daemon.go index <HASH>..<HASH> 100644 --- a/daemon/daemon.go +++ b/daemon/daemon.go @@ -767,7 +767,7 @@ func NewDaemon(dp datapath.Datapath) (*Daemon, *endpointRestoreState, error) { t, err := trigger.NewTrigger(trigger.Parameters{ Name: "policy_update", PrometheusMetrics: true, - MinInterval: time.Second, + MinInterval: option.Config.PolicyTriggerInterval, TriggerFunc: d.policyUpdateTrigger, }) if err != nil { diff --git a/daemon/daemon_main.go b/daemon/daemon_main.go index <HASH>..<HASH> 100644 --- a/daemon/daemon_main.go +++ b/daemon/daemon_main.go @@ -794,6 +794,10 @@ func init() { flags.String(option.WriteCNIConfigurationWhenReady, "", fmt.Sprintf("Write the CNI configuration as specified via --%s to path when agent is ready", option.ReadCNIConfiguration)) option.BindEnv(option.WriteCNIConfigurationWhenReady) + flags.Duration(option.PolicyTriggerInterval, defaults.PolicyTriggerInterval, "Time between triggers of policy updates (regenerations for all endpoints)") + flags.MarkHidden(option.PolicyTriggerInterval) + option.BindEnv(option.PolicyTriggerInterval) + viper.BindPFlags(flags) } diff --git a/pkg/defaults/defaults.go b/pkg/defaults/defaults.go index <HASH>..<HASH> 100644 --- a/pkg/defaults/defaults.go +++ b/pkg/defaults/defaults.go @@ -267,4 +267,8 @@ const ( // AutoCreateCiliumNodeResource enables automatic creation of a // CiliumNode resource for the local node AutoCreateCiliumNodeResource = false + + // PolicyTriggerInterval is default amount of time between triggers of + // policy updates are invoked. + PolicyTriggerInterval = 1 * time.Second ) diff --git a/pkg/option/config.go b/pkg/option/config.go index <HASH>..<HASH> 100644 --- a/pkg/option/config.go +++ b/pkg/option/config.go @@ -542,6 +542,10 @@ const ( // DeprecatedEnableLegacyServices enables the legacy services DeprecatedEnableLegacyServices = "enable-legacy-services" + + // PolicyTriggerInterval is the amount of time between triggers of policy + // updates are invoked. + PolicyTriggerInterval = "policy-trigger-interval" ) // FQDNS variables @@ -1099,6 +1103,10 @@ type DaemonConfig struct { // EgressMasqueradeInterfaces is the selector used to select interfaces // subject to egress masquerading EgressMasqueradeInterfaces string + + // PolicyTriggerInterval is the amount of time between when policy updates + // are triggered. + PolicyTriggerInterval time.Duration } var ( @@ -1502,6 +1510,7 @@ func (c *DaemonConfig) Populate() { c.Version = viper.GetString(Version) c.Workloads = viper.GetStringSlice(ContainerRuntime) c.WriteCNIConfigurationWhenReady = viper.GetString(WriteCNIConfigurationWhenReady) + c.PolicyTriggerInterval = viper.GetDuration(PolicyTriggerInterval) if nativeCIDR := viper.GetString(IPv4NativeRoutingCIDR); nativeCIDR != "" { c.ipv4NativeRoutingCIDR = cidr.MustParseCIDR(nativeCIDR)
config: make policy trigger duration configurable Maintain the default of one second. Make this option configurable for testing strain on endpoint regenerations.
cilium_cilium
train
6fb9b9246e70a2ef638aa75e2c487826c55ff91f
diff --git a/tokenserver/appengine/impl/projectscope/config_validation.go b/tokenserver/appengine/impl/projectscope/config_validation.go index <HASH>..<HASH> 100644 --- a/tokenserver/appengine/impl/projectscope/config_validation.go +++ b/tokenserver/appengine/impl/projectscope/config_validation.go @@ -15,7 +15,9 @@ package projectscope import ( - "go.chromium.org/luci/common/data/stringset" + "sort" + "strings" + "go.chromium.org/luci/common/proto/config" "go.chromium.org/luci/config/validation" ) @@ -39,9 +41,8 @@ func validateSingleIdentityProjectAssignment(ctx *validation.Context, cfg *confi ctx.Enter("identity configuration") defer ctx.Exit() - // Used to validate that projects don't share identities. - identities := stringset.Set{} - entries := 0 + // Service account email => list of project that use it. + idents := map[string][]string{} for _, project := range cfg.Projects { ctx.Enter("Validate project %s IdentityConfig", project.Id) @@ -50,15 +51,23 @@ func validateSingleIdentityProjectAssignment(ctx *validation.Context, cfg *confi // project identities is completed by all customers. if identcfg, valid := validateHasIdentityConfig(ctx, project); valid { validateCanIssueTokenForIdentity(ctx, identcfg.ServiceAccountEmail) - identities.Add(identcfg.ServiceAccountEmail) - entries++ + idents[identcfg.ServiceAccountEmail] = append(idents[identcfg.ServiceAccountEmail], project.Id) } ctx.Exit() } - // Make sure projects don't share identities. - if identities.Len() < entries { - ctx.Errorf("at least two projects sharing the same identity") + // Warn when projects share identities. + var shared []string + for ident, projs := range idents { + if len(projs) > 1 { + shared = append(shared, ident) + } + } + sort.Strings(shared) + for _, ident := range shared { + ctx.Warningf( + "project-scoped account %s is used by multiple projects: %s", + ident, strings.Join(idents[ident], ", ")) } } diff --git a/tokenserver/appengine/impl/projectscope/config_validation_test.go b/tokenserver/appengine/impl/projectscope/config_validation_test.go index <HASH>..<HASH> 100644 --- a/tokenserver/appengine/impl/projectscope/config_validation_test.go +++ b/tokenserver/appengine/impl/projectscope/config_validation_test.go @@ -31,8 +31,8 @@ func TestValidation(t *testing.T) { t.Parallel() cases := []struct { - Cfg string - Errors []string + Cfg string + Warnings []string }{ { // No errors, "normal looking" config. @@ -50,7 +50,7 @@ func TestValidation(t *testing.T) { `, }, { - // Identity double assignment, broken config. + // Identity double assignment, produces a warning. Cfg: ` projects { id: "id1" @@ -63,7 +63,7 @@ func TestValidation(t *testing.T) { } } projects { - id: "id1" + id: "id2" config_location { url: "https://some/repo" storage_type: GITILES @@ -73,8 +73,8 @@ func TestValidation(t *testing.T) { } } `, - Errors: []string{ - `in "projects.cfg" (identity configuration): at least two projects sharing the same identity`, + Warnings: []string{ + `project-scoped account foo@bar.com is used by multiple projects: id1, id2`, }, }, } @@ -92,13 +92,15 @@ func TestValidation(t *testing.T) { validateSingleIdentityProjectAssignment(ctx, cfg) verr := ctx.Finalize() - if len(cs.Errors) == 0 { // no errors expected + if len(cs.Warnings) == 0 { So(verr, ShouldBeNil) } else { verr := verr.(*validation.Error) - So(len(verr.Errors), ShouldEqual, len(cs.Errors)) + So(verr.Errors, ShouldHaveLength, len(cs.Warnings)) for i, err := range verr.Errors { - So(err, ShouldErrLike, cs.Errors[i]) + sev, _ := validation.SeverityTag.In(err) + So(sev, ShouldEqual, validation.Warning) + So(err, ShouldErrLike, cs.Warnings[i]) } } }
[tokenserver] Allow projects to share project-scoped accounts. Still warn though. Chromium wants to use a LUCI project per Chromium milestone. Requiring each of them to have a dedicated project-scoped account is a lot of unnecessary work. R=<EMAIL>, <EMAIL> CC=<EMAIL> Change-Id: I<I>ffbe<I>bf<I>b0c9df<I>e<I> Reviewed-on: <URL>
luci_luci-go
train
9a0ac9f7ab8eaf7631e1308e36dbaa532c510922
diff --git a/expression/builtin_string_vec.go b/expression/builtin_string_vec.go index <HASH>..<HASH> 100644 --- a/expression/builtin_string_vec.go +++ b/expression/builtin_string_vec.go @@ -2051,11 +2051,30 @@ func (b *builtinRpadSig) vecEvalString(input *chunk.Chunk, result *chunk.Column) } func (b *builtinCharLengthBinarySig) vectorized() bool { - return false + return true } func (b *builtinCharLengthBinarySig) vecEvalInt(input *chunk.Chunk, result *chunk.Column) error { - return errors.Errorf("not implemented") + n := input.NumRows() + buf, err := b.bufAllocator.get(types.ETString, n) + if err != nil { + return err + } + defer b.bufAllocator.put(buf) + if err := b.args[0].VecEvalString(b.ctx, input, buf); err != nil { + return err + } + result.ResizeInt64(n, false) + result.MergeNulls(buf) + res := result.Int64s() + for i := 0; i < n; i++ { + if result.IsNull(i) { + continue + } + str := buf.GetString(i) + res[i] = int64(len(str)) + } + return nil } func (b *builtinBinSig) vectorized() bool { diff --git a/expression/builtin_string_vec_test.go b/expression/builtin_string_vec_test.go index <HASH>..<HASH> 100644 --- a/expression/builtin_string_vec_test.go +++ b/expression/builtin_string_vec_test.go @@ -162,6 +162,9 @@ var vecBuiltinStringCases = map[string][]vecExprBenchCase{ }, ast.CharLength: { {retEvalType: types.ETInt, childrenTypes: []types.EvalType{types.ETString}}, + {retEvalType: types.ETInt, childrenTypes: []types.EvalType{types.ETString}, + childrenFieldTypes: []*types.FieldType{{Tp: mysql.TypeString, Flag: mysql.BinaryFlag, Collate: charset.CollationBin}}, + }, }, ast.BitLength: { {retEvalType: types.ETInt, childrenTypes: []types.EvalType{types.ETString}},
expression: implement vectorized evaluation for builtinCharLengthBinarySig (#<I>)
pingcap_tidb
train
2e820386129ff426954680d10b227a7e9e1f38ab
diff --git a/tests/Guzzle/Tests/Service/Builder/ServiceBuilderTest.php b/tests/Guzzle/Tests/Service/Builder/ServiceBuilderTest.php index <HASH>..<HASH> 100644 --- a/tests/Guzzle/Tests/Service/Builder/ServiceBuilderTest.php +++ b/tests/Guzzle/Tests/Service/Builder/ServiceBuilderTest.php @@ -240,5 +240,6 @@ EOT; )); $c = $s->getBuilder('michael.mock')->build(); + $this->assertType('Guzzle\\Tests\\Service\\Mock\\MockClient', $c); } } \ No newline at end of file
[Tests] Ensuring that the correct class was instantiated when using the assumed DefaultBuilder
guzzle_guzzle3
train
a316b46ce24d4cad80af65fc78c6e8b7e0dd5737
diff --git a/synth.py b/synth.py index <HASH>..<HASH> 100644 --- a/synth.py +++ b/synth.py @@ -17,6 +17,9 @@ import synthtool as s import synthtool.gcp as gcp import subprocess +import logging + +logging.basicConfig(level=logging.DEBUG) gapic = gcp.GAPICGenerator() @@ -25,21 +28,11 @@ versions = ['v1', 'v1p1beta1', 'v1p2beta1'] for version in versions: library = gapic.node_library('vision', version) - s.copy(library / 'protos') - s.copy(library / 'src' / version) - s.copy(library / 'samples') - s.copy(library / 'system-test') - s.copy(library / 'test') +s.copy(library, excludes=['src/index.js', 'README.md', 'package.json']) ''' Node.js specific cleanup ''' -# Repo Cleanup/Setup -subprocess.run(['npm', 'install']) - -# Generates scaffolding, enters contributors names -subprocess.run(['npm', 'run', 'generate-scaffolding']) - -# prettify and lint +subprocess.run(['npm', 'ci']) subprocess.run(['npm', 'run', 'prettier']) subprocess.run(['npm', 'run', 'lint'])
synth.py: follow synth scripts from other repos (#<I>) * gen: synth.py v2 * gen: synth.py excludes src/index.js for multi-version
googleapis_nodejs-vision
train
ed99fa0ff4665da270dec5e104dbc74234514bff
diff --git a/lib/releaf/rspec/helpers.rb b/lib/releaf/rspec/helpers.rb index <HASH>..<HASH> 100644 --- a/lib/releaf/rspec/helpers.rb +++ b/lib/releaf/rspec/helpers.rb @@ -49,6 +49,9 @@ module Releaf end def within_dialog(&block) + if first('.dialog form[data-validation=true]') + find('.dialog form[data-validation-initialized=true]') # wait for validation to initialize + end within(".dialog") do yield end
"within_dialog" test helper validation initalization support added
cubesystems_releaf
train
20ae95387b1b058ed50838705868df302732b6ae
diff --git a/fluid-api/pom.xml b/fluid-api/pom.xml index <HASH>..<HASH> 100644 --- a/fluid-api/pom.xml +++ b/fluid-api/pom.xml @@ -59,7 +59,7 @@ <dependency> <groupId>redis.clients</groupId> <artifactId>jedis</artifactId> - <scope>compile</scope> + <scope>provided</scope> </dependency> <!--Google--> @@ -67,7 +67,7 @@ <dependency> <groupId>com.googlecode.xmemcached</groupId> <artifactId>xmemcached</artifactId> - <scope>compile</scope> + <scope>provided</scope> </dependency> <!--Guava--> diff --git a/fluid-api/src/main/java/com/fluidbpm/program/api/vo/ws/WS.java b/fluid-api/src/main/java/com/fluidbpm/program/api/vo/ws/WS.java index <HASH>..<HASH> 100644 --- a/fluid-api/src/main/java/com/fluidbpm/program/api/vo/ws/WS.java +++ b/fluid-api/src/main/java/com/fluidbpm/program/api/vo/ws/WS.java @@ -176,6 +176,7 @@ public class WS { public static final String INCLUDE_DESCENDANTS = "include_descendants"; public static final String INCLUDE_FORM_PROPERTIES = "include_form_properties"; public static final String LOCK_FOR_USER_ID = "lock_for_user_id"; + public static final String ADD_TO_PERSONAL_INVENTORY = "add_to_personal_inventory"; //Remove from Personal Inventory... public static final String REMOVE_FROM_PERSONAL_INVENTORY = "remove_from_personal_inventory"; @@ -210,8 +211,15 @@ public class WS { * * @return {@code v1/form_container/} */ - public static final String formContainerCreate() { - return Version.VERSION_1.concat(ROOT).concat(CREATE); + public static final String formContainerCreate(boolean addToPersonalInventory) { + String returnVal = Version.VERSION_1.concat(ROOT).concat(CREATE); + + returnVal += "?"; + returnVal += QueryParam.ADD_TO_PERSONAL_INVENTORY; + returnVal += "="; + returnVal += addToPersonalInventory; + + return returnVal; } /** @@ -220,7 +228,6 @@ public class WS { * @return {@code v1/form_container/update} */ public static final String formContainerUpdate() { - return Version.VERSION_1.concat(ROOT).concat(UPDATE); } diff --git a/fluid-ws-java-client/src/main/java/com/fluidbpm/ws/client/v1/form/FormContainerClient.java b/fluid-ws-java-client/src/main/java/com/fluidbpm/ws/client/v1/form/FormContainerClient.java index <HASH>..<HASH> 100644 --- a/fluid-ws-java-client/src/main/java/com/fluidbpm/ws/client/v1/form/FormContainerClient.java +++ b/fluid-ws-java-client/src/main/java/com/fluidbpm/ws/client/v1/form/FormContainerClient.java @@ -53,28 +53,46 @@ public class FormContainerClient extends ABaseClientWS { * @param serviceTicketParam The Server issued Service Ticket. */ public FormContainerClient( - String endpointBaseUrlParam, - String serviceTicketParam) { + String endpointBaseUrlParam, String serviceTicketParam + ) { super(endpointBaseUrlParam); - this.setServiceTicket(serviceTicketParam); } /** * Create a new Form Container / Electronic Forms. * - * @param formParam The Form to create. + * @param form The Form to create. + * @param addToPersonalInventory Should the form be added to the users P/I after creation. + * + * @return Created Form Container / Electronic Form. * * @see Field */ - public Form createFormContainer(Form formParam) { - if (formParam != null && this.serviceTicket != null) { - formParam.setServiceTicket(this.serviceTicket); + public Form createFormContainer( + Form form, + boolean addToPersonalInventory + ) { + if (form != null && this.serviceTicket != null) { + form.setServiceTicket(this.serviceTicket); } return new Form(this.putJson( - formParam, WS.Path.FormContainer.Version1.formContainerCreate())); + form, + WS.Path.FormContainer.Version1.formContainerCreate(addToPersonalInventory))); + } + + /** + * Create a new Form Container / Electronic Forms. + * The Form will not be added to the P/I. + * + * @param form The Form to create. + * @return Created Form Container / Electronic Form. + * + * @see Field + */ + public Form createFormContainer(Form form) { + return this.createFormContainer(form, false); } /**
Add formContainerCreate with an option to add to personal inventory.
Koekiebox-PTY-LTD_Fluid
train
f19b37f0c467dd2cf4103f214c3ce8aa33bf162b
diff --git a/src/Parser/State.php b/src/Parser/State.php index <HASH>..<HASH> 100644 --- a/src/Parser/State.php +++ b/src/Parser/State.php @@ -69,6 +69,8 @@ class State */ protected $new_statement_character_found = false; + protected $valid_placeholder_characters = []; + /** * * Constructor @@ -90,6 +92,12 @@ class State if (array_key_exists(0, $this->values)) { array_unshift($this->values, null); } + $this->valid_placeholder_characters = array_merge( + range('a', 'z'), + range ('A', 'Z'), + range (0, 9), + ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '_'] + ); } /** @@ -314,7 +322,20 @@ class State */ public function getIdentifier() { - return $this->capture('\\w+\\b'); + $identifier = ''; + $length = 0; + while (! $this->done()) + { + $character = mb_substr($this->statement, $this->current_index + $length, 1, $this->charset); + if (! in_array($character, $this->valid_placeholder_characters, true)) + { + return $identifier; + } + $identifier .= $character; + $length++; + + } + return $identifier; } /**
Removed the call to State->capture from State->getIdentifier
auraphp_Aura.Sql
train
8a1637f8f52a4417d597cbccee15b98edf6d48b7
diff --git a/lib/fog/vcloud/models/compute/server.rb b/lib/fog/vcloud/models/compute/server.rb index <HASH>..<HASH> 100644 --- a/lib/fog/vcloud/models/compute/server.rb +++ b/lib/fog/vcloud/models/compute/server.rb @@ -31,6 +31,11 @@ module Fog def tags Fog::Vcloud::Compute::Tags.new(:service => service, :href => href + '/metadata') end + + def customization_script + load_unless_loaded! + self.guest_customization[:CustomizationScript] + end def computer_name load_unless_loaded!
shows customizationScript of a VM
fog_fog
train
6287c21ade4dac0cb597ee7e303e7ee08652db96
diff --git a/sitemetrics/__init__.py b/sitemetrics/__init__.py index <HASH>..<HASH> 100644 --- a/sitemetrics/__init__.py +++ b/sitemetrics/__init__.py @@ -1,4 +1,4 @@ -VERSION = (0, 3, 0) +VERSION = (0, 4, 0) # TODO api docs # TODO tests
Version number is bumped up.
idlesign_django-sitemetrics
train
bb32cb1a079981bd61b7114d268ba64db4b6f015
diff --git a/pysat/utils/testing.py b/pysat/utils/testing.py index <HASH>..<HASH> 100644 --- a/pysat/utils/testing.py +++ b/pysat/utils/testing.py @@ -7,6 +7,7 @@ import numpy as np + def assert_list_contains(small_list, big_list, test_nan=False, test_case=True): """Assert all elements of one list exist within the other list.
STY: added extra whitespace Added missing blank line.
rstoneback_pysat
train
6ba089abeab5bdfb99dcd476387bb2083627f2fc
diff --git a/package.json b/package.json index <HASH>..<HASH> 100644 --- a/package.json +++ b/package.json @@ -67,8 +67,8 @@ "@4c/rollout": "^1.2.0", "@babel/cli": "^7.1.0", "@babel/core": "^7.1.5", - "@storybook/addon-actions": "^3.4.11", - "@storybook/react": "3.4.11", + "@storybook/addon-actions": "^4.1.7", + "@storybook/react": "^4.1.7", "babel-core": "^7.0.0-bridge.0", "babel-eslint": "^10.0.1", "babel-jest": "^23.6.0", diff --git a/stories/Calendar.js b/stories/Calendar.js index <HASH>..<HASH> 100644 --- a/stories/Calendar.js +++ b/stories/Calendar.js @@ -1,4 +1,6 @@ -import { storiesOf, action } from '@storybook/react' +import { storiesOf } from '@storybook/react' +import { action } from '@storybook/addon-actions' + import moment from 'moment' import React from 'react' diff --git a/stories/DragAndDrop.js b/stories/DragAndDrop.js index <HASH>..<HASH> 100644 --- a/stories/DragAndDrop.js +++ b/stories/DragAndDrop.js @@ -1,5 +1,6 @@ import React from 'react' -import { storiesOf, action } from '@storybook/react' +import { storiesOf } from '@storybook/react' +import { action } from '@storybook/addon-actions' import { events, Calendar, DragAndDropCalendar } from './helpers' import customComponents from './helpers/customComponents' diff --git a/stories/Timeslots.js b/stories/Timeslots.js index <HASH>..<HASH> 100644 --- a/stories/Timeslots.js +++ b/stories/Timeslots.js @@ -1,4 +1,5 @@ -import { storiesOf, action } from '@storybook/react' +import { storiesOf } from '@storybook/react' +import { action } from '@storybook/addon-actions' import moment from 'moment' import React from 'react' diff --git a/stories/helpers/customComponents.js b/stories/helpers/customComponents.js index <HASH>..<HASH> 100644 --- a/stories/helpers/customComponents.js +++ b/stories/helpers/customComponents.js @@ -1,5 +1,5 @@ import React from 'react' -import { action } from '@storybook/react' +import { action } from '@storybook/addon-actions' const customComponents = { dateCellWrapper: dateCellWrapperProps => {
chore: update storybook (#<I>) Fixes issue described in <URL>
intljusticemission_react-big-calendar
train
84f6002088bb4cc6c210463a76d55d473a1a637a
diff --git a/modeshape-jcr/src/test/java/org/modeshape/jcr/LocalIndexProviderTest.java b/modeshape-jcr/src/test/java/org/modeshape/jcr/LocalIndexProviderTest.java index <HASH>..<HASH> 100644 --- a/modeshape-jcr/src/test/java/org/modeshape/jcr/LocalIndexProviderTest.java +++ b/modeshape-jcr/src/test/java/org/modeshape/jcr/LocalIndexProviderTest.java @@ -1211,4 +1211,29 @@ public class LocalIndexProviderTest extends AbstractIndexProviderTest { assertTrue(indexManager().getIndexNames("missing", "default", IndexManager.IndexStatus.ENABLED).isEmpty()); assertTrue(indexManager().getIndexNames(LOCAL_PROVIDER_NAME, "missing", IndexManager.IndexStatus.ENABLED).isEmpty()); } + + @Test + @FixFor( "MODE-2498") + public void shouldSelectCorrectIndexWhenMultipleIndexesUseTheSameAncestorProperty() throws Exception { + registerNodeType("mix:custom", true, true, "mix:title"); + registerNodeType("mix:custom2", true, true, "mix:title"); + registerValueIndex("custom_names", "mix:custom", null, "*", "jcr:name", PropertyType.NAME); + registerValueIndex("custom2_names", "mix:custom2", null, "*", "jcr:name", PropertyType.NAME); + + //print = true; + + // Add a node that uses this type ... + Node root = session().getRootNode(); + Node book1 = root.addNode("myFirstBook"); + book1.addMixin("mix:custom"); + book1.setProperty("jcr:title", "The Title"); + + waitForIndexes(); + session.save(); + waitForIndexes(); + + // Compute a query plan that should use this index ... + Query query = jcrSql2Query("SELECT * FROM [mix:custom] as custom where custom.[jcr:name] = 'myFirstBook'"); + validateQuery().rowCount(1L).validate(query, query.execute()); + } }
MODE-<I> - Added unit test to demonstrate issue
ModeShape_modeshape
train
310ddec823cbeadf694c396b27b5610474f05bcc
diff --git a/registry/registry_mock_test.go b/registry/registry_mock_test.go index <HASH>..<HASH> 100644 --- a/registry/registry_mock_test.go +++ b/registry/registry_mock_test.go @@ -330,7 +330,7 @@ func TestPing(t *testing.T) { /* Uncomment this to test Mocked Registry locally with curl * WARNING: Don't push on the repos uncommented, it'll block the tests - */ + * func TestWait(t *testing.T) { log.Println("Test HTTP server ready and waiting:", testHttpServer.URL) c := make(chan int)
Disabled test server in the tests
containers_storage
train
14b39a826ee72bfbe65b2485e33d5ce139c79c1f
diff --git a/content/js/site.js b/content/js/site.js index <HASH>..<HASH> 100644 --- a/content/js/site.js +++ b/content/js/site.js @@ -1,7 +1,7 @@ (function () { 'use strict'; - var clipboard = new Clipboard('.btn'); + var clipboard = new Clipboard('.copy-code'); clipboard.on('success', function(e) { e.clearSelection();
Instantiate Clipboard.js w/ proper class. 📎
Pier1_rocketbelt
train
1780558b07980c301ab72aab3b817e63f9493445
diff --git a/config_resolver/core.py b/config_resolver/core.py index <HASH>..<HASH> 100644 --- a/config_resolver/core.py +++ b/config_resolver/core.py @@ -25,6 +25,7 @@ LookupMetadata = namedtuple('LookupMetadata', [ 'loaded_files', 'config_id' ]) +FileReadability = namedtuple('FileReadability', 'is_readable filename reason') def from_string(data): @@ -166,9 +167,9 @@ def find_files(config_id, search_path=None, filename=None, version=None, secure= # which files we loaded in order to inform the user. for dirname in path: conf_name = join(dirname, config_filename) - readable = is_readable(config_id, conf_name, version=version, - secure=secure) - yield conf_name, readable + readability = is_readable(config_id, conf_name, version=version, + secure=secure) + yield conf_name, readability.is_readable def effective_filename(config_id, custom_filename): @@ -209,11 +210,12 @@ def is_readable(config_id, filename, version=None, secure=False): if not exists(filename): log.debug('Skipping %s (File not found).', filename) - return False + return FileReadability(False, filename, 'File not found') log.debug('Checking if %s is readable.', filename) insecure_readable = True file_version = None + unreadable_reason = '<unknown>' # Check if the file is version-compatible with this instance. new_config = ConfigParser() @@ -241,28 +243,31 @@ def is_readable(config_id, filename, version=None, secure=False): major, minor, _ = StrictVersion(file_version).version expected_major, expected_minor, _ = version.version if expected_major != major: + msg = 'Invalid major version number in %r. Expected %r, got %r!' log.error( - 'Invalid major version number in %r. Expected %r, got %r!', + msg, abspath(filename), str(version), file_version) insecure_readable = False + unreadable_reason = msg elif expected_minor != minor: + msg = 'Mismatching minor version number in %r. Expected %r, got %r!' log.warning( - 'Mismatching minor version number in %r. ' - 'Expected %r, got %r!', + msg, abspath(filename), str(version), file_version) insecure_readable = True + unreadable_reason = msg if insecure_readable and secure: mode = get_stat(filename).st_mode if (mode & stat.S_IRGRP) or (mode & stat.S_IROTH): msg = "File %r is not secure enough. Change it's mode to 600" log.warning(msg, filename) - return False - return insecure_readable + return FileReadability(False, filename, msg) + return FileReadability(insecure_readable, filename, unreadable_reason) class Config(ConfigParser): # pylint: disable = too-many-ancestors
Add "reason" to file readability
exhuma_config_resolver
train
3976ed024df68a51589d723702e5db8f51da4fe1
diff --git a/enamdict.js b/enamdict.js index <HASH>..<HASH> 100644 --- a/enamdict.js +++ b/enamdict.js @@ -20,13 +20,11 @@ var lineRegex = /^([^ ]+) \[([^\]]+)\] \/(.*?) \([^\)]*?\b([ugfms])\b[^\)]*?\).* // Data cache var byRomaji = {}; -// TODO: Need to add a simplified lookup where all long vowels are reduced +// A simplified lookup where all long vowels are reduced // Use it as a backup when nothing else is found var backupRomaji = []; module.exports = { - byRomaji: byRomaji, - init: function(stream, callback) { if (arguments.length < 2) { callback = stream; @@ -49,7 +47,6 @@ module.exports = { find: function(romaji) { romaji = romaji.toLowerCase(); - // TODO: Clean up accents, convert to oo, etc.? if (romaji in byRomaji) { return new Entries(byRomaji[romaji]);
Remove some debugging stuff, tweak some comments.
jeresig_node-enamdict
train
be663b8596fc3e3d02cb5716db1d638788a0230e
diff --git a/pkg_resources/__init__.py b/pkg_resources/__init__.py index <HASH>..<HASH> 100644 --- a/pkg_resources/__init__.py +++ b/pkg_resources/__init__.py @@ -791,8 +791,8 @@ class WorkingSet(object): # key -> dist best = {} to_activate = [] - # Map requirement to the extras that require it - extra_req_mapping = {} + + req_extras = _ReqExtras() # Mapping of requirement to set of distributions that required it; # useful for reporting info about conflicts. @@ -805,7 +805,7 @@ class WorkingSet(object): # Ignore cyclic or redundant dependencies continue - if not self._markers_pass(req, extra_req_mapping): + if not req_extras.markers_pass(req): continue dist = best.get(req.key) @@ -840,33 +840,13 @@ class WorkingSet(object): # Register the new requirements needed by req for new_requirement in new_requirements: required_by[new_requirement].add(req.project_name) - extra_req_mapping[new_requirement] = req.extras + req_extras[new_requirement] = req.extras processed[req] = True # return list of distros to activate return to_activate - @staticmethod - def _markers_pass(req, extra_req_mapping): - """ - Return False if the req has a marker and fails - evaluation. Otherwise, return True. - - extra_req_mapping is a map of requirements to - extras. - """ - if not req.marker: - return True - - result = [] - if req in extra_req_mapping: - for extra in extra_req_mapping[req] or ['']: - result.append(req.marker.evaluate({'extra': extra})) - else: - result.append(req.marker.evaluate()) - return any(result) - def find_plugins(self, plugin_env, full_env=None, installer=None, fallback=True): """Find all activatable distributions in `plugin_env` @@ -993,6 +973,31 @@ class WorkingSet(object): self.callbacks = callbacks[:] +class _ReqExtras(dict): + """ + Map each requirement to the extras that demanded it. + """ + + def markers_pass(self, req): + """ + Evaluate markers for req against each extra that + demanded it. + + Return False if the req has a marker and fails + evaluation. Otherwise, return True. + """ + if not req.marker: + return True + + result = [] + if req in self: + for extra in self[req] or ['']: + result.append(req.marker.evaluate({'extra': extra})) + else: + result.append(req.marker.evaluate()) + return any(result) + + class Environment(object): """Searchable snapshot of distributions on a search path""" diff --git a/pkg_resources/tests/test_resources.py b/pkg_resources/tests/test_resources.py index <HASH>..<HASH> 100644 --- a/pkg_resources/tests/test_resources.py +++ b/pkg_resources/tests/test_resources.py @@ -187,10 +187,25 @@ class TestDistro: assert list(res) == [Foo] def test_environment_marker_evaluation_called(self): - ws = WorkingSet([]) - req, = parse_requirements("bar;python_version<'4'") - extra_req_mapping = {req: ()} - assert ws._markers_pass(req, extra_req_mapping) == True + """ + If one package foo requires bar without any extras, + markers should pass for bar. + """ + parent_req, = parse_requirements("foo") + req, = parse_requirements("bar;python_version>='2'") + req_extras = pkg_resources._ReqExtras({req: parent_req.extras}) + assert req_extras.markers_pass(req) + + parent_req, = parse_requirements("foo[]") + req, = parse_requirements("bar;python_version>='2'") + req_extras = pkg_resources._ReqExtras({req: parent_req.extras}) + assert req_extras.markers_pass(req) + + # this is a little awkward; I would want this to fail + parent_req, = parse_requirements("foo") + req, = parse_requirements("bar;python_version>='2' and extra==''") + req_extras = pkg_resources._ReqExtras({req: parent_req.extras}) + assert req_extras.markers_pass(req) def test_marker_evaluation_with_extras(self): """Extras are also evaluated as markers at resolution time."""
Extract _ReqExtras to encapsulate that functionality and decouple it from WorkingSet.
pypa_setuptools
train
67a5d686ec40e7b9df8f2d4413f26fc0d32493ff
diff --git a/ashley/src/com/badlogic/ashley/core/Engine.java b/ashley/src/com/badlogic/ashley/core/Engine.java index <HASH>..<HASH> 100644 --- a/ashley/src/com/badlogic/ashley/core/Engine.java +++ b/ashley/src/com/badlogic/ashley/core/Engine.java @@ -324,7 +324,6 @@ public class Engine { } listeners.end(); notifying = false; - processPendingEntityOperations(); } private void notifyFamilyListenersAdd(Family family, Entity entity) {
removed another call to processPendingEntityOperations (thanks @Lusito)
libgdx_ashley
train
cf706f4ca8a02ed458b406b8a3bc412e592ff17f
diff --git a/bio/sifts.py b/bio/sifts.py index <HASH>..<HASH> 100644 --- a/bio/sifts.py +++ b/bio/sifts.py @@ -11,12 +11,13 @@ import os import xml from xml.sax import parse as parse_xml -from tools.fs.io import read_file, write_file, safe_gz_unzip +from tools.fs.fsio import read_file, write_file, safe_gz_unzip from tools.comms.ftp import get_insecure_resource, FTPException550 from tools import colortext import rcsb from pdb import PDB#, cases_with_ACE_residues_we_can_ignore -from basics import SequenceMap, residue_type_3to1_map, protonated_residue_type_3to1_map, non_canonical_amino_acids +from basics import PDBUniParcSequenceMap, residue_type_3to1_map, protonated_residue_type_3to1_map, non_canonical_amino_acids +from uniprot import uniprot_map # Methods @@ -101,14 +102,15 @@ class SIFTSResidue(object): class SIFTS(xml.sax.handler.ContentHandler): - def __init__(self, xml_contents, pdb_contents, acceptable_sequence_percentage_match = 70.0): + def __init__(self, xml_contents, pdb_contents, acceptable_sequence_percentage_match = 70.0, cache_dir = None): '''The PDB contents should be passed so that we can deal with HETATM records as the XML does not contain the necessary information.''' - self.atom_to_uniparc_sequence_maps = {} # UniProt AC -> SequenceMap(PDB ResidueID -> UniParc sequence index) where the UniParc sequence index is 1-based (first element has index 1) + self.atom_to_uniparc_sequence_map = {} # UniProt AC -> SequenceMap(PDB ResidueID -> UniParc sequence index) where the UniParc sequence index is 1-based (first element has index 1) self.counters = {} self.pdb_id = None self.acceptable_sequence_percentage_match = acceptable_sequence_percentage_match self.tag_data = [] + self.cache_dir = cache_dir self.modified_residues = PDB(pdb_contents).modified_residues @@ -160,7 +162,7 @@ class SIFTS(xml.sax.handler.ContentHandler): xml_contents = safe_gz_unzip(xml_contents) # Return the object - handler = SIFTS(xml_contents, pdb_contents, acceptable_sequence_percentage_match = acceptable_sequence_percentage_match) + handler = SIFTS(xml_contents, pdb_contents, acceptable_sequence_percentage_match = acceptable_sequence_percentage_match, cache_dir = cache_dir) xml.sax.parseString(xml_contents, handler) return handler @@ -287,8 +289,20 @@ class SIFTS(xml.sax.handler.ContentHandler): residue_count = 0 residues_matched = 0 - residue_maps = {} + residue_map = {} residues_encountered = set() + + UniProtACs = set() + for r in self.residues: + UniProtACs.add(r.UniProtAC) + print(UniProtACs) + ACC_to_UPARC_mapping = uniprot_map('ACC', 'UPARC', list(UniProtACs), cache_dir = self.cache_dir) + assert(ACC_to_UPARC_mapping.keys() == list(UniProtACs)) + for k, v in ACC_to_UPARC_mapping.iteritems(): + assert(len(v) == 1) + ACC_to_UPARC_mapping[k] = v[0] + print(ACC_to_UPARC_mapping) + for r in self.residues: if not(r.PDBResidueID.isalnum() and int(r.PDBResidueID.isalnum()) < 0): @@ -298,9 +312,9 @@ class SIFTS(xml.sax.handler.ContentHandler): # Store the PDB->UniProt mapping UniProtAC = r.UniProtAC + UniParcID = ACC_to_UPARC_mapping[UniProtAC] full_pdb_residue_ID = r.get_pdb_residue_id() - residue_maps[UniProtAC] = residue_maps.get(UniProtAC, {}) - residue_maps[UniProtAC][full_pdb_residue_ID] = r.UniProtResidueIndex + residue_map[full_pdb_residue_ID] = (UniParcID, r.UniProtResidueIndex) # Make sure we only have at most one match per PDB residue assert(full_pdb_residue_ID not in residues_encountered) @@ -314,9 +328,7 @@ class SIFTS(xml.sax.handler.ContentHandler): residue_count += 1 # Create the SequenceMaps - self.atom_to_uniparc_sequence_maps = {} - for UniProtAC, atom_uniparc_mapping in residue_maps.iteritems(): - self.atom_to_uniparc_sequence_maps[UniProtAC] = SequenceMap.from_dict(atom_uniparc_mapping) + self.atom_to_uniparc_sequence_map = PDBUniParcSequenceMap.from_dict(residue_map) # Check the match percentage if residue_count == 0:
The SIFTS mapping now maps PDB ATOM IDs to (UniParcID, index-in-sequence) pairs.
Kortemme-Lab_klab
train
8d06e9d4a2f3cc559df9a2ab92330e75f1f1b570
diff --git a/test/com/opera/core/systems/OperaScopePreferencesTest.java b/test/com/opera/core/systems/OperaScopePreferencesTest.java index <HASH>..<HASH> 100644 --- a/test/com/opera/core/systems/OperaScopePreferencesTest.java +++ b/test/com/opera/core/systems/OperaScopePreferencesTest.java @@ -22,6 +22,7 @@ import com.opera.core.systems.preferences.OperaPreferences; import com.opera.core.systems.preferences.OperaScopePreferences; import org.junit.Before; +import org.junit.BeforeClass; import org.junit.Test; import static org.junit.Assert.assertEquals; @@ -29,12 +30,14 @@ import static org.junit.Assert.assertTrue; public class OperaScopePreferencesTest extends OperaDriverTestCase { - public static final - OperaScopePreferences - preferences = - (OperaScopePreferences) driver.preferences(); + public static OperaScopePreferences preferences; public static int prefCountBefore = 0; + @BeforeClass + public void beforeAll() { + preferences = driver.preferences(); + } + @Before public void beforeEach() { prefCountBefore = preferences.size();
Ensuring driver has been created first
operasoftware_operaprestodriver
train
150c7b32473ee312c1d047ddced5229d45cdaccf
diff --git a/core/src/main/java/dagger/internal/SetBinding.java b/core/src/main/java/dagger/internal/SetBinding.java index <HASH>..<HASH> 100644 --- a/core/src/main/java/dagger/internal/SetBinding.java +++ b/core/src/main/java/dagger/internal/SetBinding.java @@ -48,7 +48,7 @@ public final class SetBinding<T> extends Binding<Set<T>> { } } - private final Set<Binding<?>> contributors = new LinkedHashSet<Binding<?>>(); + private final Set<Binding<?>> contributors; /** * Creates a new {@code SetBinding} with the given "provides" key, and the requiredBy object @@ -56,6 +56,7 @@ public final class SetBinding<T> extends Binding<Set<T>> { */ public SetBinding(String key, Object requiredBy) { super(key, null, false, requiredBy); + contributors = new LinkedHashSet<Binding<?>>(); } /** @@ -64,7 +65,9 @@ public final class SetBinding<T> extends Binding<Set<T>> { */ public SetBinding(SetBinding<T> original) { super(original.provideKey, null, false, original.requiredBy); - contributors.addAll(original.contributors); + this.setLibrary(original.library()); + this.setDependedOn(original.dependedOn()); + contributors = new LinkedHashSet<Binding<?>>(original.contributors); } @Override public void attach(Linker linker) {
Propagate SetBindings settings in the copy constructor.
square_dagger
train
f9abdef6708b3775ee3fc9a2dc364ff1adb1834d
diff --git a/eZ/Publish/API/Repository/Tests/SearchServiceTest.php b/eZ/Publish/API/Repository/Tests/SearchServiceTest.php index <HASH>..<HASH> 100644 --- a/eZ/Publish/API/Repository/Tests/SearchServiceTest.php +++ b/eZ/Publish/API/Repository/Tests/SearchServiceTest.php @@ -440,7 +440,7 @@ class SearchServiceTest extends BaseTest array( new Query( array( - 'criterion' => new Criterion\Field( + 'filter' => new Criterion\Field( 'name', Criterion\Operator::EQ, 'members' @@ -453,7 +453,7 @@ class SearchServiceTest extends BaseTest array( new Query( array( - 'criterion' => new Criterion\Field( + 'filter' => new Criterion\Field( 'name', Criterion\Operator::EQ, 'Members' @@ -466,7 +466,7 @@ class SearchServiceTest extends BaseTest array( new Query( array( - 'criterion' => new Criterion\Field( + 'filter' => new Criterion\Field( 'name', Criterion\Operator::EQ, 'MEMBERS'
Updated property name after merge with master
ezsystems_ezpublish-kernel
train
75ead813b4335bab2464b6af0fb776c3d746242f
diff --git a/lib/gitlab/client/notes.rb b/lib/gitlab/client/notes.rb index <HASH>..<HASH> 100644 --- a/lib/gitlab/client/notes.rb +++ b/lib/gitlab/client/notes.rb @@ -36,9 +36,11 @@ class Gitlab::Client # # @param [Integer] project The ID of a project. # @param [Integer] snippet The ID of a snippet. + # @option options [Integer] :page The page number. + # @option options [Integer] :per_page The number of results per page. # @return [Array<Gitlab::ObjectifiedHash>] - def snippet_notes(project, snippet) - get("/projects/#{project}/snippets/#{snippet}/notes") + def snippet_notes(project, snippet, options={}) + get("/projects/#{project}/snippets/#{snippet}/notes", :query => options) end # Gets a single wall note.
Added :page and :per_page query options to snippet_notes method
NARKOZ_gitlab
train
b4788e69667f22c6d18eb9a4cde33d48b109b50a
diff --git a/test/src/main/java/org/jvnet/hudson/test/HudsonTestCase.java b/test/src/main/java/org/jvnet/hudson/test/HudsonTestCase.java index <HASH>..<HASH> 100644 --- a/test/src/main/java/org/jvnet/hudson/test/HudsonTestCase.java +++ b/test/src/main/java/org/jvnet/hudson/test/HudsonTestCase.java @@ -951,6 +951,18 @@ public abstract class HudsonTestCase extends TestCase implements RootAction { } /** + * Asserts that the console output of the build does not contain the given substring. + */ + public void assertLogNotContains(String substring, Run run) throws Exception { + String log = getLog(run); + if(!log.contains(substring)) + return; // good! + + System.out.println(log); + fail("Console output of "+run+" contain "+substring); + } + + /** * Get entire log file (this method is deprecated in hudson.model.Run, * but in tests it is OK to load entire log). */
added assertLogNotContains method.
jenkinsci_jenkins
train
c1a116e03582053169426cb25fc0258e90d6fcf8
diff --git a/src/Instructions/Mapping/Base/Mapping.php b/src/Instructions/Mapping/Base/Mapping.php index <HASH>..<HASH> 100644 --- a/src/Instructions/Mapping/Base/Mapping.php +++ b/src/Instructions/Mapping/Base/Mapping.php @@ -30,10 +30,10 @@ abstract class Mapping extends Instruction implements MappingInterface { $value = $scaffold->getInput($this->input_name); if (null !== $value) { - $this->applyInternal($value, $scaffold); if ($this->is_exclusive) { $scaffold->excludeInput($this->input_name); } + $this->applyInternal($value, $scaffold); } elseif(null !== $this->default) { $this->applyInternal($this->default, $scaffold); }
excluding input values for exclusive mapping before applying
Nayjest_Builder
train
b51443458197dc3b56f917143991460ad410f502
diff --git a/client.go b/client.go index <HASH>..<HASH> 100644 --- a/client.go +++ b/client.go @@ -2497,7 +2497,9 @@ func (me *Client) pieceChanged(t *torrent, piece int) { } else { me.onFailedPiece(t, piece) } - t.updatePiecePriority(piece) + if t.updatePiecePriority(piece) { + t.piecePriorityChanged(piece) + } t.publishPieceChange(piece) } diff --git a/torrent.go b/torrent.go index <HASH>..<HASH> 100644 --- a/torrent.go +++ b/torrent.go @@ -860,6 +860,8 @@ func (t *torrent) updatePiecePriority(piece int) bool { return true } +// Update all piece priorities in one hit. This function should have the same +// output as updatePiecePriority, but across all pieces. func (t *torrent) updatePiecePriorities() { newPrios := make([]piecePriority, t.numPieces()) t.pendingPieces.IterTyped(func(piece int) (more bool) { @@ -875,8 +877,10 @@ func (t *torrent) updatePiecePriorities() { } return true }) - // TODO: Do I need a pass suppressing stuff that we already have? for i, prio := range newPrios { + if t.pieceComplete(i) { + prio = PiecePriorityNone + } if prio != t.Pieces[i].priority { t.Pieces[i].priority = prio t.piecePriorityChanged(i)
Trigger piece request order changes on connections in a few spots The client was requesting already obtained data in certain circumstances. This fixes it.
anacrolix_torrent
train
605911c215505c38b6e76fe66d77d009b0f4a4ed
diff --git a/src/main/java/mServer/crawler/sender/ard/json/ArdMediaArrayToDownloadUrlsConverter.java b/src/main/java/mServer/crawler/sender/ard/json/ArdMediaArrayToDownloadUrlsConverter.java index <HASH>..<HASH> 100644 --- a/src/main/java/mServer/crawler/sender/ard/json/ArdMediaArrayToDownloadUrlsConverter.java +++ b/src/main/java/mServer/crawler/sender/ard/json/ArdMediaArrayToDownloadUrlsConverter.java @@ -12,6 +12,7 @@ import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.stream.Collectors; import java.util.stream.StreamSupport; import mServer.crawler.sender.MediathekReader; @@ -165,7 +166,9 @@ public class ArdMediaArrayToDownloadUrlsConverter { } private Map<Qualities, URL> extractRelevantUrls() { - final Map<Qualities, URL> downloadUrls = new HashMap<>(); + final Map<Qualities, URL> downloadUrls = new EnumMap<>(Qualities.class); + + removeAutoM3u8IfMp4Exists(); urls.entrySet().stream() .filter(entry -> !entry.getValue().isEmpty()) @@ -174,9 +177,58 @@ public class ArdMediaArrayToDownloadUrlsConverter { entry -> { finalizeUrl(entry).ifPresent(url -> downloadUrls.put(entry.getKey(), url)); }); + + // add lowest HD-Url as NORMAL if normal is not present + if (!downloadUrls.containsKey(Qualities.NORMAL) && urls.containsKey(Qualities.HD)) { + Optional<URL> normalUrl = determineNormalUrlFromHd(urls.get(Qualities.HD)); + normalUrl.ifPresent(url -> downloadUrls.put(Qualities.NORMAL, url)); + } + return downloadUrls; } + // removes m3u8-url with quality=auto if at least one mp4 url exists + // otherwise m3u8-url could be the normal url while small+hd contains mp4-urls + private void removeAutoM3u8IfMp4Exists() { + AtomicBoolean existsMp4 = new AtomicBoolean(false); + + urls.values().forEach(set -> + set.forEach(value -> { + final Optional<String> fileType = UrlUtils.getFileType(value.getUrl()); + if (fileType.isPresent() && "mp4".equalsIgnoreCase(fileType.get())) { + existsMp4.set(true); + } + })); + + if(existsMp4.get() && urls.containsKey(Qualities.NORMAL)) { + urls.get(Qualities.NORMAL).removeIf(urlInfo -> urlInfo.getQuality().equalsIgnoreCase("auto")); + } + } + + private Optional<URL> determineNormalUrlFromHd(Set<ArdFilmUrlInfoDto> ardFilmUrlInfoDtos) { + ArdFilmUrlInfoDto relevantInfo = null; + + for (final ArdFilmUrlInfoDto info : ardFilmUrlInfoDtos) { + if (info.getWidth() > 0 && info.getHeight() > 0) { + if (relevantInfo == null) { + relevantInfo = info; + } else if (relevantInfo.getQuality().compareTo(info.getQuality()) > 0) { + relevantInfo = info; + } + } + } + + if (relevantInfo != null) { + try { + return Optional.of(new URL(relevantInfo.getUrl())); + } catch (final MalformedURLException malformedUrlException) { + LOG.error("A download URL is defect.", malformedUrlException); + } + } + + return Optional.empty(); + } + private static boolean isFileTypeRelevant(final Map.Entry<Qualities, Set<ArdFilmUrlInfoDto>> entry) { return entry.getValue().stream() .anyMatch(video -> video.getFileType().isPresent() @@ -212,7 +264,8 @@ public class ArdMediaArrayToDownloadUrlsConverter { case 3: case 4: return Qualities.HD; - + case 5: + return Qualities.UHD; case 2: default: return Qualities.NORMAL; diff --git a/src/main/java/mServer/crawler/sender/base/Qualities.java b/src/main/java/mServer/crawler/sender/base/Qualities.java index <HASH>..<HASH> 100644 --- a/src/main/java/mServer/crawler/sender/base/Qualities.java +++ b/src/main/java/mServer/crawler/sender/base/Qualities.java @@ -2,7 +2,7 @@ package mServer.crawler.sender.base; public enum Qualities { - HD("HD"), NORMAL("Normal"), SMALL("Klein"); + HD("HD"), NORMAL("Normal"), SMALL("Klein"), UHD("UHD"); private final String description;
#<I> handle uhd urls and use lowest hd url for normal if normal is missing
mediathekview_MServer
train
c6e8d387af359e9ca077d27d752be3341550a09b
diff --git a/src/methods/create.js b/src/methods/create.js index <HASH>..<HASH> 100644 --- a/src/methods/create.js +++ b/src/methods/create.js @@ -21,11 +21,11 @@ export default function({model, req, res}) { if (documents.length === 1) { return res .status(201) - .body = documents[0].get({plain: true}) + .body = documents[0].toJSON() } res .status(201) - .body = documents.map(document => document.get({plain: true})) + .body = documents.map(document => document.toJSON()) }) } diff --git a/src/methods/read.js b/src/methods/read.js index <HASH>..<HASH> 100644 --- a/src/methods/read.js +++ b/src/methods/read.js @@ -18,7 +18,7 @@ function fetchAll({model, req, res}) { .then(documents => { res .status(200) - .body = documents.map(document => document.get({plain: true})) + .body = documents.map(document => document.toJSON()) }) .then(() => { return model.count()
refactor: consistent use of `toJSON` All documents are transformed into a plain JavaScript object before being stored in `response.body`.
netiam_contrib-rest
train
3fd44d65645cb8177b480f7e61dedc6142b19d21
diff --git a/src/Flysystem/AssetAdapter.php b/src/Flysystem/AssetAdapter.php index <HASH>..<HASH> 100644 --- a/src/Flysystem/AssetAdapter.php +++ b/src/Flysystem/AssetAdapter.php @@ -6,6 +6,7 @@ use League\Flysystem\Adapter\Local; use League\Flysystem\Config as FlysystemConfig; use SilverStripe\Assets\File; use SilverStripe\Assets\Filesystem; +use SilverStripe\Core\Config\Config; use SilverStripe\Core\Config\Configurable; use SilverStripe\ORM\ArrayList; use SilverStripe\View\ArrayData; @@ -144,9 +145,16 @@ class AssetAdapter extends Local } } + Config::nest(); + Config::modify()->set(SSViewer::class, 'source_file_comments', false); + $viewer = SSViewer::create(array($template)); - return (string)$viewer->process(new ArrayData(array( + $result = (string)$viewer->process(new ArrayData(array( 'AllowedExtensions' => $allowedExtensions ))); + + Config::unnest(); + + return $result; } }
FIX Ensure source file comments are disabled when generating htaccess file
silverstripe_silverstripe-assets
train
6534e6606d2b22a9eb874db8251536b4c050f425
diff --git a/GPy/testing/model_tests.py b/GPy/testing/model_tests.py index <HASH>..<HASH> 100644 --- a/GPy/testing/model_tests.py +++ b/GPy/testing/model_tests.py @@ -126,6 +126,23 @@ class MiscTests(unittest.TestCase): m2.kern[:] = m.kern[''].values() np.testing.assert_equal(m.log_likelihood(), m2.log_likelihood()) + def test_big_model(self): + m = GPy.examples.dimensionality_reduction.mrd_simulation(optimize=0, plot=0, plot_sim=0) + m.X.fix() + print m + m.unfix() + m.checkgrad() + print m + m.fix() + print m + m.inducing_inputs.unfix() + print m + m.checkgrad() + m.unfix() + m.checkgrad() + m.checkgrad() + print m + def test_model_set_params(self): m = GPy.models.GPRegression(self.X, self.Y) lengthscale = np.random.uniform()
[tests] added some unfix/fix, print, and gradcheck tests; it basically just behaves as a user would with a model
SheffieldML_GPy
train
06eebb1d87a519ea97adb435b4f6c42df6dbc027
diff --git a/iapws/iapws95.py b/iapws/iapws95.py index <HASH>..<HASH> 100644 --- a/iapws/iapws95.py +++ b/iapws/iapws95.py @@ -456,11 +456,13 @@ class MEoS(_fase): rhoL = self._Liquid_Density(T) rhoG = self._Vapor_Density(T) - g = 1000. + g = 500. erroro = 1e6 rholo = rhoL rhogo = rhoG + contador = 0 while True: + contador += 1 deltaL = rhoL/self.rhoc deltaG = rhoG/self.rhoc liquido = self._Helmholtz(rhoL, T) @@ -475,19 +477,21 @@ class MEoS(_fase): Kdv = 2*vapor["fird"]+deltaG*vapor["firdd"]+1/deltaG Delta = Jdv*Kdl-Jdl*Kdv error = abs(Kv-Kl)+abs(Jv-Jl) -# print error, g - if error < 1e-12: + if error < 1e-12 or contador > 150: break elif error > erroro: rhoL = rholo rhoG = rhogo - g = g/2. + g = g*0.5 else: erroro = error rholo = rhoL rhogo = rhoG rhoL = rhoL+g/Delta*((Kv-Kl)*Jdv-(Jv-Jl)*Kdv) rhoG = rhoG+g/Delta*((Kv-Kl)*Jdl-(Jv-Jl)*Kdl) + if error > 1e-7: + print("Iteration don´t converge") + Ps = self.R*T*rhoL*rhoG/(rhoL-rhoG)*(liquido["fir"]-vapor["fir"]+log(deltaL/deltaG)) return rhoL, rhoG, Ps
fix saturation algorithm for convergence in D2O
jjgomera_iapws
train
5a5929ab2e920891e79553c8d2cdb3eb489405f5
diff --git a/lib/joinable/acts_as_joinable_component.rb b/lib/joinable/acts_as_joinable_component.rb index <HASH>..<HASH> 100644 --- a/lib/joinable/acts_as_joinable_component.rb +++ b/lib/joinable/acts_as_joinable_component.rb @@ -180,24 +180,33 @@ module Joinable #:nodoc: end end - # Delegate view_permission to permission_link - def view_permission + # inherited_view_permission is calculated by ascending up the chain of joinable components + # while view permission only takes into account the current joinable component. + # inherited_view_permission is for external use while view_permission should only be used internally. + def inherited_view_permission permission_link.try(:component_view_permission) end - + + def view_permission + klass_view_permission = self.class.view_permission + klass_view_permission = klass_view_permission.call(self) if klass_view_permission.respond_to?(:call) + + # Allow view_permission to be set at the instance level + return @view_permission || klass_view_permission + end + attr_writer :view_permission + + # Recurse up the tree to see if any of the intervening joinable_components have a customized view permission # In that case, inherit that customized view permission. This allows searches of the form # Feed.with_permission(:view) where feeds belong to joinable_components with custom view permissions. # The query will then be able to return only the feeds which belong to joinable components that are viewable by the user - def recurse_to_inherit_custom_view_permission(current_view_permission = self.class.view_permission) + def recurse_to_inherit_custom_view_permission parent = next_link - if parent.acts_like?(:joinable) - if current_view_permission.respond_to?(:call) - return current_view_permission.call(self) - else - return current_view_permission || :view - end + # If we've reached the last component in the chain or if this component provides a view permission + if parent.acts_like?(:joinable) || self.view_permission + return self.view_permission || :view elsif parent.acts_like?(:joinable_component) return parent.recurse_to_inherit_custom_view_permission else
Allow view_permission to be set at the instance level. Stop recursing if the component specifies a view_permission.
rrn_acts_as_joinable
train
b8cdc964a63c0d47819ca559e05c47627468c461
diff --git a/classes/World.js b/classes/World.js index <HASH>..<HASH> 100644 --- a/classes/World.js +++ b/classes/World.js @@ -1,4 +1,5 @@ -var fs = require('fs'); +var fs = require('fs'), + pth = require('path'); /** * The World class is the main game driver. It determines which files to load * and from where, stores all rooms and objects, handles loading of rooms and @@ -50,7 +51,7 @@ World = new Class({ }); this.set('name', config.world_name); this.config = config; - this.worldPath = ENGINE_PATH+config.world_path+'/'; + this.worldPath = require('path').join(config.world_path); this.defaultRoom = config.start_room; this.players = this.players; this.rooms = this.rooms; @@ -60,8 +61,8 @@ World = new Class({ }, initializeRooms: function() { - - var path = this.joinPath(this.worldPath+this.roomPath); + + var path = this.joinPath(this.worldPath+'/'+this.roomPath); //Recursive glob of all .js files in rooms/. var files = this.globJS(path); @@ -142,7 +143,7 @@ World = new Class({ } if (!this.rooms[path]) { var room = this.loadModule(this.roomPath+path); - if (!room) { log_error("Room not found for "+path); } + if (!room) { console.warn("Room not found for "+path); } if (room) { this.rooms[path] = new room(this, path); this.rooms[path].create(); @@ -248,8 +249,8 @@ World = new Class({ filename = this.joinPath(filename); //Synchronous is OK in this case because we'll be loading these files on initialization. - var files = []; - var stats = fs.statSync(filename); + var files = [], stats; + stats = fs.statSync(filename); if (stats.isFile() && filename.match(/\.js$/)) { files.push(filename); } else if (stats.isDirectory()) {
Changed the way paths are handled in the World class.
Yuffster_discord-engine
train
a06b26b730f5bf997dbcbe22e01c2fe006830530
diff --git a/moskito-aop/src/main/java/net/anotheria/moskito/aop/aspect/MonitoringBaseAspect.java b/moskito-aop/src/main/java/net/anotheria/moskito/aop/aspect/MonitoringBaseAspect.java index <HASH>..<HASH> 100644 --- a/moskito-aop/src/main/java/net/anotheria/moskito/aop/aspect/MonitoringBaseAspect.java +++ b/moskito-aop/src/main/java/net/anotheria/moskito/aop/aspect/MonitoringBaseAspect.java @@ -56,13 +56,18 @@ public class MonitoringBaseAspect extends AbstractMoskitoAspect<ServiceStats>{ String producerId = producer.getProducerId(); String prevProducerId = lastProducerId.get(); lastProducerId.set(producerId); + //calculate cumulated stats (default stats). + //we only do this if previous producer wasn't same as current -> meaning if we call a second method in the same producer we don't count it as new call. + boolean calculateCumulatedStats = !producerId.equals(prevProducerId); String methodName = getMethodStatName(pjp.getSignature()); ServiceStats defaultStats = producer.getDefaultStats(); ServiceStats methodStats = producer.getStats(methodName); final Object[] args = pjp.getArgs(); - defaultStats.addRequest(); + if (calculateCumulatedStats) { + defaultStats.addRequest(); + } if (methodStats != null) { methodStats.addRequest(); } @@ -112,7 +117,8 @@ public class MonitoringBaseAspect extends AbstractMoskitoAspect<ServiceStats>{ ret = pjp.proceed(); return ret; } catch (InvocationTargetException e) { - defaultStats.notifyError(e.getTargetException()); + if (calculateCumulatedStats) + defaultStats.notifyError(e.getTargetException()); if (methodStats != null) { methodStats.notifyError(); } @@ -121,7 +127,8 @@ public class MonitoringBaseAspect extends AbstractMoskitoAspect<ServiceStats>{ } throw e.getCause(); } catch (Throwable t) { - defaultStats.notifyError(t); + if (calculateCumulatedStats) + defaultStats.notifyError(t); if (methodStats != null) { methodStats.notifyError(); } @@ -134,14 +141,16 @@ public class MonitoringBaseAspect extends AbstractMoskitoAspect<ServiceStats>{ throw t; } finally { long exTime = System.nanoTime() - startTime; - if (!producerId.equals(prevProducerId)) { + if (calculateCumulatedStats) { defaultStats.addExecutionTime(exTime); } if (methodStats != null) { methodStats.addExecutionTime(exTime); } lastProducerId.set(prevProducerId); - defaultStats.notifyRequestFinished(); + if (calculateCumulatedStats) { + defaultStats.notifyRequestFinished(); + } if (methodStats != null) { methodStats.notifyRequestFinished(); }
Cumulated stats calculation is now conditional, to prevent double counting in the cumulated (default) stats when one monitored method calls another within the same producer
anotheria_moskito
train
1e2b9b75c720a93888d3386f18937fff232e4af4
diff --git a/src/main/java/com/buschmais/jqassistant/plugin/common/impl/scanner/AbstractContainerScannerPlugin.java b/src/main/java/com/buschmais/jqassistant/plugin/common/impl/scanner/AbstractContainerScannerPlugin.java index <HASH>..<HASH> 100644 --- a/src/main/java/com/buschmais/jqassistant/plugin/common/impl/scanner/AbstractContainerScannerPlugin.java +++ b/src/main/java/com/buschmais/jqassistant/plugin/common/impl/scanner/AbstractContainerScannerPlugin.java @@ -29,13 +29,15 @@ public abstract class AbstractContainerScannerPlugin<I, E> extends AbstractScann public final FileDescriptor scan(I container, String path, Scope scope, Scanner scanner) throws IOException { ScannerContext context = scanner.getContext(); FileContainerDescriptor containerDescriptor = getContainerDescriptor(container, context); + LOGGER.info("Entering {}", path); context.push(FileContainerDescriptor.class, containerDescriptor); try { - for (E e : getEntries(container)) { + Iterable<? extends E> entries = getEntries(container); + for (E e : entries) { try (VirtualEntry entry = getEntry(container, e)) { String relativePath = getRelativePath(container, e); Scope entryScope = createScope(scope); - LOGGER.info("Scanning entry '{}'.", relativePath); + LOGGER.info("Scanning {}", relativePath); FileDescriptor descriptor = scanner.scan(entry, relativePath, entryScope); if (containerDescriptor != null) { containerDescriptor.getContains().add(descriptor); @@ -44,6 +46,7 @@ public abstract class AbstractContainerScannerPlugin<I, E> extends AbstractScann } } finally { context.pop(FileContainerDescriptor.class); + LOGGER.info("Leaving {}", path); } return containerDescriptor; } diff --git a/src/main/java/com/buschmais/jqassistant/plugin/common/impl/scanner/AbstractDirectoryScannerPlugin.java b/src/main/java/com/buschmais/jqassistant/plugin/common/impl/scanner/AbstractDirectoryScannerPlugin.java index <HASH>..<HASH> 100644 --- a/src/main/java/com/buschmais/jqassistant/plugin/common/impl/scanner/AbstractDirectoryScannerPlugin.java +++ b/src/main/java/com/buschmais/jqassistant/plugin/common/impl/scanner/AbstractDirectoryScannerPlugin.java @@ -64,7 +64,6 @@ public abstract class AbstractDirectoryScannerPlugin extends AbstractContainerSc } }; Files.walkFileTree(directoryPath, visitor); - LOGGER.info("Scanning directory '{}' [{} entries].", container.getAbsolutePath(), files.size()); return files; }
#<I> added example CSV scanner plugin
buschmais_jqa-plugin-common
train
4e7520f26a49b5d3b45189cdd375904feb16d4ee
diff --git a/aws-sdk-core/lib/aws.rb b/aws-sdk-core/lib/aws.rb index <HASH>..<HASH> 100644 --- a/aws-sdk-core/lib/aws.rb +++ b/aws-sdk-core/lib/aws.rb @@ -10,45 +10,45 @@ module Aws # @api private # services - SERVICE_MODULE_NAMES = [ - :AutoScaling, - :CloudFormation, - :CloudFront, - :CloudSearch, - :CloudSearchDomain, - :CloudTrail, - :CloudWatch, - :CloudWatchLogs, - :CognitoIdentity, - :CognitoSync, - :DataPipeline, - :DirectConnect, - :DynamoDB, - :EC2, - :ElastiCache, - :ElasticBeanstalk, - :ElasticLoadBalancing, - :ElasticTranscoder, - :EMR, - :Glacier, - :IAM, - :ImportExport, - :Kinesis, - :OpsWorks, - :RDS, - :Redshift, - :Route53, - :Route53Domains, - :S3, - :SES, - :SimpleDB, - :SNS, - :SQS, - :StorageGateway, - :STS, - :Support, - :SWF, - ] + SERVICE_MODULE_NAMES = %w( + AutoScaling + CloudFormation + CloudFront + CloudSearch + CloudSearchDomain + CloudTrail + CloudWatch + CloudWatchLogs + CognitoIdentity + CognitoSync + DataPipeline + DirectConnect + DynamoDB + EC2 + ElastiCache + ElasticBeanstalk + ElasticLoadBalancing + ElasticTranscoder + EMR + Glacier + IAM + ImportExport + Kinesis + OpsWorks + RDS + Redshift + Route53 + Route53Domains + S3 + SES + SimpleDB + SNS + SQS + StorageGateway + STS + Support + SWF + ) @config = {} @services = {}
Converted the list of service module names from symbols to strings.
aws_aws-sdk-ruby
train
bedc3a242bdb3738e24b513b12f75dcc6746715f
diff --git a/tests/Sniffs/PHP/data/uselessParenthesesNoErrors.php b/tests/Sniffs/PHP/data/uselessParenthesesNoErrors.php index <HASH>..<HASH> 100644 --- a/tests/Sniffs/PHP/data/uselessParenthesesNoErrors.php +++ b/tests/Sniffs/PHP/data/uselessParenthesesNoErrors.php @@ -41,6 +41,7 @@ $class = new class ($number) { $b = $array['function']($parameter); +$foo = new Foo(); new self($a); new static($b); new parent($c);
UselessParenthesesSniff: More tests
slevomat_coding-standard
train
653bc1fc72cde666373f039af99ad91076c1249f
diff --git a/examples/tutorial_pandas.py b/examples/tutorial_pandas.py index <HASH>..<HASH> 100644 --- a/examples/tutorial_pandas.py +++ b/examples/tutorial_pandas.py @@ -20,7 +20,7 @@ def main(host='localhost', port=8086): client.create_database(dbname) print("Write DataFrame") - client.write_points({'demo':df}) + client.write_points({'demo': df}) print("Read DataFrame") client.query("select * from demo") diff --git a/influxdb/dataframe_client.py b/influxdb/dataframe_client.py index <HASH>..<HASH> 100644 --- a/influxdb/dataframe_client.py +++ b/influxdb/dataframe_client.py @@ -7,6 +7,11 @@ import warnings from .client import InfluxDBClient +try: + import pandas as pd +except ImportError: + pd = None + class DataFrameClient(InfluxDBClient): """ @@ -17,13 +22,9 @@ class DataFrameClient(InfluxDBClient): def __init__(self, *args, **kwargs): super(DataFrameClient, self).__init__(*args, **kwargs) - try: - global pd - import pandas as pd - except ImportError as ex: + if not pd: raise ImportError( - 'DataFrameClient requires Pandas, "{ex}" problem importing' - .format(ex=str(ex)) + 'DataFrameClient requires Pandas' ) self.EPOCH = pd.Timestamp('1970-01-01 00:00:00.000+00:00')
Fix error handling when trying to import pandas
influxdata_influxdb-python
train
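For the influxdb-python change above, a minimal sketch of what the new guard means for users, assuming pandas is absent from the environment: the module import still succeeds, and the error now surfaces as a plain ImportError at construction time.

```python
from influxdb import DataFrameClient

# Hypothetical illustration: with pandas missing, construction fails with
# the new, simpler message instead of a wrapped import-time error.
try:
    client = DataFrameClient(host='localhost', port=8086)
except ImportError as e:
    print(e)  # "DataFrameClient requires Pandas"
```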
0598385231a25e0616ec1087f9ff6c06580a2c1e
diff --git a/state/allcollections.go b/state/allcollections.go index <HASH>..<HASH> 100644 --- a/state/allcollections.go +++ b/state/allcollections.go @@ -288,7 +288,8 @@ func allCollections() CollectionSchema { Key: []string{"model-uuid", "machineid"}, }}, }, - minUnitsC: {}, + unitStatesC: {}, + minUnitsC: {}, // This collection holds documents that indicate units which are queued // to be assigned to machines. It is used exclusively by the @@ -623,6 +624,7 @@ const ( txnLogC = "txns.log" txnsC = "txns" unitsC = "units" + unitStatesC = "unitstates" upgradeInfoC = "upgradeInfo" userLastLoginC = "userLastLogin" usermodelnameC = "usermodelname"
Ensure unitstate collection is created if missing
juju_juju
train
81ae0877f1861bcd3ab93df360f1c7412b2967f1
diff --git a/src/js/touch-carousel.js b/src/js/touch-carousel.js index <HASH>..<HASH> 100644 --- a/src/js/touch-carousel.js +++ b/src/js/touch-carousel.js @@ -58,7 +58,10 @@ TouchCarousel.prototype._regTouchGestures = function() { this.$itemsWrapper .add(this.$indicators) // fixes issue #9 - .hammer({ drag_lock_to_axis: true }) + .hammer({ + drag_lock_to_axis: true, + preventDefault: true, + }) .on("release dragleft dragright swipeleft swiperight", $.proxy(this._handleGestures, this)); } @@ -141,8 +144,6 @@ } TouchCarousel.prototype._handleGestures = function( e ) { - // disable browser scrolling - e.gesture.preventDefault(); if(this.sliding) return;
preventDefault: prevent the default gesture on the browser so the page doesn't slide up and down. Calling e.gesture.preventDefault() in the handler is not working for me (possibly because of the newer Hammer.js version, I don't know), but setting the preventDefault option in the Hammer configuration seems to work.
ixisio_bootstrap-touch-carousel
train
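For the bootstrap-touch-carousel change above, a hedged sketch of what the new configuration amounts to from the outside; the selector and handler are illustrative, not taken from the plugin:

```js
function handleGestures(e) {
  // carousel sliding logic would go here
}

// Hammer's own preventDefault option now suppresses the browser's default
// gesture, replacing the e.gesture.preventDefault() call in the handler.
$('.carousel-inner').hammer({
  drag_lock_to_axis: true,
  preventDefault: true
}).on('release dragleft dragright swipeleft swiperight', handleGestures);
```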
ba27c7e2210779e06b39e3b1332d2493baa08a5b
diff --git a/cirq-core/cirq/ops/pauli_string.py b/cirq-core/cirq/ops/pauli_string.py index <HASH>..<HASH> 100644 --- a/cirq-core/cirq/ops/pauli_string.py +++ b/cirq-core/cirq/ops/pauli_string.py @@ -745,7 +745,7 @@ class PauliString(raw_types.Operation, Generic[TKey]): while any(result.shape): result = np.trace(result, axis1=0, axis2=len(result.shape) // 2) - return float(result * self.coefficient) + return float(np.real(result * self.coefficient)) def zip_items( self, other: 'cirq.PauliString[TKey]'
Avoid warning on complex-to-float conversion (#<I>) Make it clear we want to use the real part.
quantumlib_Cirq
train
e8c20e38517b57da4649b65d1c7461bf5b96b3b3
diff --git a/session.go b/session.go index <HASH>..<HASH> 100644 --- a/session.go +++ b/session.go @@ -194,7 +194,7 @@ func (s Session) SubredditComments(subreddit string, params ListingOptions) ([]* if err != nil { return nil, err } - + var comments interface{} if err = json.NewDecoder(body).Decode(&comments); err != nil { return nil, err @@ -204,3 +204,86 @@ func (s Session) SubredditComments(subreddit string, params ListingOptions) ([]* return helper.comments, nil } + +// RedditorComments returns a slice of Comments from a given Reddit user name. +func (s Session) RedditorComments(username string, params ListingOptions) ([]*Comment, error) { + v, err := query.Values(params) + if err != nil { + return nil, err + } + + baseUrl := "https://www.reddit.com" + + // If username given, add to URL + if username != "" { + baseUrl += "/user/" + username + } + + redditUrl := fmt.Sprintf(baseUrl+"/comments.json?%s", v.Encode()) + + req := request{ + url: redditUrl, + useragent: s.useragent, + } + + body, err := req.getResponse() + if err != nil { + return nil, err + } + + var comments interface{} + if err = json.NewDecoder(body).Decode(&comments); err != nil { + return nil, err + } + helper := new(helper) + helper.buildComments(comments) + + return helper.comments, nil +} + +// RedditorSubmissions returns a slice of Submissions from a given Reddit user name. +func (s Session) RedditorSubmissions(username string, params ListingOptions) ([]*Submission, error) { + v, err := query.Values(params) + if err != nil { + return nil, err + } + + baseUrl := "https://www.reddit.com" + + // If username given, add to URL + if username != "" { + baseUrl += "/user/" + username + } + + redditUrl := fmt.Sprintf(baseUrl+"/submitted.json?%s", v.Encode()) + + req := request{ + url: redditUrl, + useragent: s.useragent, + } + body, err := req.getResponse() + if err != nil { + return nil, err + } + + type Response struct { + Data struct { + Children []struct { + Data *Submission + } + } + } + + r := new(Response) + err = json.NewDecoder(body).Decode(r) + if err != nil { + return nil, err + } + + submissions := make([]*Submission, len(r.Data.Children)) + for i, child := range r.Data.Children { + submissions[i] = child.Data + } + + return submissions, nil +}
added functions to read in user comments and submissions (#<I>)
jzelinskie_geddit
train
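For the geddit change above, a sketch of how the two new methods might be called. The username and listing options are illustrative, and NewSession is assumed to be the package's existing read-only constructor:

```go
package main

import (
	"fmt"

	"github.com/jzelinskie/geddit"
)

func main() {
	// Read-only session; only a user agent is required.
	session := geddit.NewSession("geddit example v1")
	opts := geddit.ListingOptions{Limit: 5}

	comments, err := session.RedditorComments("someuser", opts)
	if err != nil {
		panic(err)
	}
	for _, c := range comments {
		fmt.Println(c.Body)
	}

	submissions, err := session.RedditorSubmissions("someuser", opts)
	if err != nil {
		panic(err)
	}
	for _, s := range submissions {
		fmt.Println(s.Title)
	}
}
```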
6f3af91c0bebe0bb566f2a589558520fe952f632
diff --git a/lib/apivore.rb b/lib/apivore.rb index <HASH>..<HASH> 100644 --- a/lib/apivore.rb +++ b/lib/apivore.rb @@ -23,20 +23,39 @@ module Apivore result == [] end - def paths() - @json['paths'] + def paths(filter = nil) + result = @json['paths'].collect { |p| Path.new(p) } + unless filter.nil? + result.select! { |p| p.has_method?(filter) } + end + result end + end - def has_model?(path, method, response = '200') - # path is the parsed json 'path' from the api description - unless path[1][method]['responses'][response].nil? - schema = path[1][method]['responses'][response]['schema'] - puts "DEBUG: #{schema}" - schema != nil - else - # this path / method combination does not have a 200 response defined, therefore return false - false - end + class Path + def initialize(path_data) + @name = path_data.first + @method_data = path_data.last + end + + def has_method?(method) + @method_data.each { |m| return true if m.first == method } + false + end + + def has_model?(method, response = '200') + !@method_data[method]['responses'][response].nil? + end + + def get_model(method, response = '200') + object = SchemaObject.new(@method_data[method]['responses'][response]) + object.model + end + end + + class SchemaObject + def initialize(schema) + @body = schema end end diff --git a/lib/apivore/rspec_matchers.rb b/lib/apivore/rspec_matchers.rb index <HASH>..<HASH> 100644 --- a/lib/apivore/rspec_matchers.rb +++ b/lib/apivore/rspec_matchers.rb @@ -20,8 +20,8 @@ module Apivore match do |body| @d = ApiDescription.new(body) pass = true - @d.paths.each do |path| - pass &= @d.has_model?(path,'get', '200') + @d.paths('get').each do |path| + pass &= path.has_model?('get', '200') end pass end
split out Paths and Schema Objects to their own classes
westfieldlabs_apivore
train
f7fcf102b6a71e11e36cd7e426af9591a1fb4655
diff --git a/clients/unshaded/src/main/java/tachyon/hadoop/HdfsFileInputStream.java b/clients/unshaded/src/main/java/tachyon/hadoop/HdfsFileInputStream.java index <HASH>..<HASH> 100644 --- a/clients/unshaded/src/main/java/tachyon/hadoop/HdfsFileInputStream.java +++ b/clients/unshaded/src/main/java/tachyon/hadoop/HdfsFileInputStream.java @@ -118,6 +118,7 @@ public class HdfsFileInputStream extends InputStream implements Seekable, Positi mClosed = true; } + // TODO(calvin): Consider removing this when the recovery logic is available in FileInStream private void getHdfsInputStream() throws IOException { if (mHdfsInputStream == null) { FileSystem fs = mHdfsPath.getFileSystem(mHadoopConf);
Add todo for cleaning up HdfsFileInputStream.
Alluxio_alluxio
train
7028666c59127592e8d0306a112c71618106a678
diff --git a/lib/api/compile_and_load.js b/lib/api/compile_and_load.js index <HASH>..<HASH> 100644 --- a/lib/api/compile_and_load.js +++ b/lib/api/compile_and_load.js @@ -5,8 +5,7 @@ var tmp = require("temp"), binding = require("./binding"); var rootDir = path.join(__dirname, "..", ".."), - headerDir = path.join(rootDir, "vendor", "tree-sitter", "include"), - runtimeLibPath = path.join(rootDir, "node_modules", "tree-sitter", "build", "Release", "runtime.a"); + headerDir = path.join(rootDir, "vendor", "tree-sitter", "include"); module.exports = function compileAndLoad(grammar) { var code = binding.compile(grammar), @@ -21,7 +20,7 @@ module.exports = function compileAndLoad(grammar) { if (status != 0) throw new Error("failed to compile C code"); - status = sh.run("gcc -shared -Wl " + objPath + " " + runtimeLibPath + " -o " + libPath); + status = sh.run("gcc -shared -Wl " + objPath + " -o " + libPath); if (status != 0) throw new Error("failed to link C code");
In compileAndLoad, don't link to tree-sitter runtime
tree-sitter_tree-sitter-cli
train
86dcbc5a8657e1028eb233282c0e85186129dbb0
diff --git a/src/python/pants/backend/codegen/tasks/apache_thrift_gen.py b/src/python/pants/backend/codegen/tasks/apache_thrift_gen.py index <HASH>..<HASH> 100644 --- a/src/python/pants/backend/codegen/tasks/apache_thrift_gen.py +++ b/src/python/pants/backend/codegen/tasks/apache_thrift_gen.py @@ -29,7 +29,7 @@ from pants.thrift_util import calculate_compile_roots, select_thrift_binary from pants.util.keywords import replace_python_keywords_in_file from pants.util.dirutil import safe_mkdir, safe_walk -INCLUDE_RE = re.compile(r'include "(.*?)"') +INCLUDE_RE = re.compile(r'include (?:"(.*?)"|\'(.*?)\')') def _copytree(from_base, to_base): def abort(error): @@ -191,7 +191,7 @@ class ApacheThriftGen(CodeGen): match = INCLUDE_RE.match(line) if not match: continue - includefile = match.group(1) + includefile = match.group(1) or match.group(2) includefile_abspath = None for base in bases: # Maybe it's a path relative to a base
Handle both single and double quotes for thrift includes

Testing Done: Ran existing test; manual test with single quotes.

Reviewed at <URL>
pantsbuild_pants
train
daaa919005599ef24516c673b64473944181beaf
diff --git a/src/engine/engineState.js b/src/engine/engineState.js index <HASH>..<HASH> 100644 --- a/src/engine/engineState.js +++ b/src/engine/engineState.js @@ -637,6 +637,14 @@ export class EngineState { this.addressCache = cacheJson.addresses this.txHeightCache = cacheJson.heights + // Fillup the missing headers to fetch + for (const txid in this.txHeightCache) { + const height = this.txHeightCache[txid].height + if (!this.pluginState.headerCache[`${height}`]) { + this.missingHeaders[`${height}`] = true + } + } + // Update the derived information: for (const scriptHash of Object.keys(this.addressCache)) { const address = this.addressCache[scriptHash]
Fill up the missing-header list when loading from cache; otherwise headers would never be fetched for an already received transaction.
EdgeApp_edge-currency-bitcoin
train
36359fac95c78ffdf76fbc47a26c7c8ad21685b1
diff --git a/openquake/calculators/ebrisk.py b/openquake/calculators/ebrisk.py index <HASH>..<HASH> 100644 --- a/openquake/calculators/ebrisk.py +++ b/openquake/calculators/ebrisk.py @@ -45,11 +45,12 @@ def start_ebrisk(gmfgetter, param, monitor): assets_by_site = assetcol.assets_by_site() with monitor('filtering ruptures'): gmfgetter.init() - yield from parallel.split_task( - ebrisk, gmfgetter.computers, gmfgetter.gmv_dt, gmfgetter.min_iml, - gmfgetter.rlzs_by_gsim, gmfgetter.weights, - assets_by_site, assetcol.tagcol, param, monitor, - duration=param['task_duration']) + if gmfgetter.computers: + yield from parallel.split_task( + ebrisk, gmfgetter.computers, gmfgetter.gmv_dt, gmfgetter.min_iml, + gmfgetter.rlzs_by_gsim, gmfgetter.weights, + assets_by_site, assetcol.tagcol, param, monitor, + duration=param['task_duration']) def _calc(computers, gmv_dt, events, min_iml, rlzs_by_gsim, weights, @@ -92,10 +93,7 @@ def _calc(computers, gmv_dt, events, min_iml, rlzs_by_gsim, weights, else: losses = lratios * asset['value-' + lt] if param['asset_loss_table']: - try: - alt[aid, eidx, lti] = losses - except: - import pdb; pdb.set_trace() + alt[aid, eidx, lti] = losses losses_by_lt[lt] = losses for loss_idx, losses in lba.compute(asset, losses_by_lt): acc[(eidx, loss_idx) + tagidxs] += losses
Cleanup [skip CI]
gem_oq-engine
train
33183fceb9bc02e07982a9bcfbe94af33519234d
diff --git a/nion/instrumentation/scan_base.py b/nion/instrumentation/scan_base.py index <HASH>..<HASH> 100644 --- a/nion/instrumentation/scan_base.py +++ b/nion/instrumentation/scan_base.py @@ -36,6 +36,9 @@ class ScanFrameParameters: self.pixel_time_us = d.get("pixel_time_us", 10) self.fov_nm = d.get("fov_nm", 8) self.rotation_rad = d.get("rotation_rad", 0) + self.subscan_pixel_size = None + self.subscan_fractional_size = None + self.subscan_fractional_center = None self.external_clock_wait_time_ms = d.get("external_clock_wait_time_ms", 0) self.external_clock_mode = d.get("external_clock_mode", 0) # 0=off, 1=on:rising, 2=on:falling self.ac_line_sync = d.get("ac_line_sync", False) @@ -43,7 +46,7 @@ class ScanFrameParameters: self.flyback_time_us = d.get("flyback_time_us", 30.0) def as_dict(self): - return { + d = { "size": self.size, "center_nm": self.center_nm, "fov_size_nm": self.fov_size_nm, @@ -56,6 +59,13 @@ class ScanFrameParameters: "ac_frame_sync": self.ac_frame_sync, "flyback_time_us": self.flyback_time_us, } + if self.subscan_pixel_size is not None: + d["subscan_pixel_size"] = self.subscan_pixel_size + if self.subscan_fractional_size is not None: + d["subscan_fractional_size"] = self.subscan_fractional_size + if self.subscan_fractional_center is not None: + d["subscan_fractional_center"] = self.subscan_fractional_center + return d def __repr__(self): return "size pixels: " + str(self.size) +\ @@ -258,23 +268,13 @@ class ScanAcquisitionTask(HardwareSource.AcquisitionTask): def __activate_frame_parameters(self): device_frame_parameters = copy.copy(self.__frame_parameters) - if device_frame_parameters.size[0] > device_frame_parameters.size[1]: - device_frame_parameters.fov_size_nm = device_frame_parameters.fov_nm, device_frame_parameters.fov_nm * device_frame_parameters.size[1] / device_frame_parameters.size[0] - elif device_frame_parameters.size[0] < device_frame_parameters.size[1]: - device_frame_parameters.fov_size_nm = device_frame_parameters.fov_nm * device_frame_parameters.size[0] / device_frame_parameters.size[1], device_frame_parameters.fov_nm - else: - device_frame_parameters.fov_size_nm = device_frame_parameters.fov_nm, device_frame_parameters.fov_nm + context_size = Geometry.FloatSize.make(device_frame_parameters.size) + device_frame_parameters.fov_size_nm = device_frame_parameters.fov_nm * context_size.aspect_ratio, device_frame_parameters.fov_nm if self.subscan_enabled and self.subscan_region: subscan_region = Geometry.FloatRect.make(self.subscan_region) - size_y = int(device_frame_parameters.size[0] * subscan_region.height) - size_x = int(device_frame_parameters.size[1] * subscan_region.width) - fov_size_nm_y = device_frame_parameters.fov_size_nm[0] * subscan_region.height - fov_size_nm_x = device_frame_parameters.fov_size_nm[1] * subscan_region.width - center_nm_y = device_frame_parameters.fov_size_nm[0] * (subscan_region.center.y - 0.5) - center_nm_x = device_frame_parameters.fov_size_nm[1] * (subscan_region.center.x - 0.5) - device_frame_parameters.size = size_y, size_x - device_frame_parameters.fov_size_nm = fov_size_nm_y, fov_size_nm_x - device_frame_parameters.center_nm = center_nm_y, center_nm_x + device_frame_parameters.subscan_pixel_size = int(context_size.height * subscan_region.height), int(context_size.width * subscan_region.width) + device_frame_parameters.subscan_fractional_size = subscan_region.height, subscan_region.width + device_frame_parameters.subscan_fractional_center = subscan_region.center.y, subscan_region.center.x 
self.__device.set_frame_parameters(device_frame_parameters)
Rework subscan params to pass context and subscan, useful for calculation.
nion-software_nionswift-instrumentation-kit
train
7b0445ec6c019c03cc4a57975e84f02363d6f14f
diff --git a/pkg/cmd/server/kube_master.go b/pkg/cmd/server/kube_master.go index <HASH>..<HASH> 100644 --- a/pkg/cmd/server/kube_master.go +++ b/pkg/cmd/server/kube_master.go @@ -47,7 +47,7 @@ func (cfg Config) BuildKubernetesMasterConfig(requestContextMapper kapi.RequestC kmaster := &kubernetes.MasterConfig{ MasterIP: masterIP, MasterPort: cfg.MasterAddr.Port, - NodeHosts: cfg.NodeList, + NodeHosts: cfg.GetNodeList(), PortalNet: &portalNet, RequestContextMapper: requestContextMapper, EtcdHelper: ketcdHelper,
fix nodelist access for kube master
openshift_origin
train
d03a564cd0d8e91fffef9e7b056e2d1de2fd734d
diff --git a/tagcube_cli/cli.py b/tagcube_cli/cli.py index <HASH>..<HASH> 100644 --- a/tagcube_cli/cli.py +++ b/tagcube_cli/cli.py @@ -145,7 +145,7 @@ class TagCubeCLI(object): else: if cmd_args.target_url is None: parser.error('argument --target-url is required') - elif not (cmd_args.target_url.startswith('http://') or\ + elif not (cmd_args.target_url.startswith('http://') or \ cmd_args.target_url.startswith('https://')): parser.error('Invalid target URL: "%s"' % cmd_args.target_url) @@ -157,6 +157,12 @@ class TagCubeCLI(object): if not is_valid_email(cmd_args.email_notify): parser.error('Invalid notification email: "%s"' % cmd_args.email_notify) + if cmd_args.path_file is not None: + try: + cmd_args.path_file = path_file_to_list(cmd_args.path_file) + except ValueError, ve: + parser.error('%s' % ve) + level = logging.DEBUG if cmd_args.verbose else logging.INFO logging.basicConfig(format='%(message)s', level=level) @@ -233,3 +239,48 @@ def is_valid_email(email): Very trivial check to verify that the user provided parameter is an email """ return '@' in email and '.' in email + + +def is_valid_path(path): + """ + :return: True if the path is valid, else raise a ValueError with the + specific error + """ + if not path.startswith('/'): + msg = 'Invalid path "%s". Paths need to start with "/".' + raise ValueError(msg % path[:40]) + + for c in ' \t': + if c in path: + msg = 'Invalid character "%s" found in path. Paths need to be' \ + ' URL-encoded.' + raise ValueError(msg % c) + + return True + + +def path_file_to_list(path_file): + """ + :return: A list with the paths which are stored in a text file in a line-by- + line format. Validate each path using is_valid_path + """ + paths = [] + + for line_no, line in enumerate(path_file.readlines(), start=1): + line = line.strip() + + if not line: + # Blank line support + continue + + if line.startswith('#'): + # Comment support + continue + + try: + is_valid_path(line) + paths.append(line) + except ValueError, ve: + raise ValueError('%s Error found in line %s.' 
% (ve, line_no)) + + return paths \ No newline at end of file diff --git a/tagcube_cli/tests/test_cli.py b/tagcube_cli/tests/test_cli.py index <HASH>..<HASH> 100644 --- a/tagcube_cli/tests/test_cli.py +++ b/tagcube_cli/tests/test_cli.py @@ -1,4 +1,7 @@ import unittest +import tempfile +import shutil +import os from mock import patch @@ -8,6 +11,20 @@ from tagcube_cli.cli import TagCubeCLI class TestTagCubeCLI(unittest.TestCase): SIMPLE_ARGS = ['--target-url=http://target.com'] + TAGCUBE_FILE = '.tagcube' + TAGCUBE_FILE_BACKUP = '.tagcube-unittest-backup' + + def setUp(self): + super(TestTagCubeCLI, self).setUp() + + if os.path.exists(self.TAGCUBE_FILE): + shutil.move(self.TAGCUBE_FILE, self.TAGCUBE_FILE_BACKUP) + + def tearDown(self): + super(TestTagCubeCLI, self).tearDown() + + if os.path.exists(self.TAGCUBE_FILE_BACKUP): + shutil.move(self.TAGCUBE_FILE_BACKUP, self.TAGCUBE_FILE) def test_user_pass_environment(self): with patch.dict('os.environ', {'TAGCUBE_EMAIL': 'x@y.com', @@ -29,3 +46,28 @@ class TestTagCubeCLI(unittest.TestCase): tagcube_cli = TagCubeCLI.from_cmd_args(parsed_args) self.assertEqual(tagcube_cli.client.email, 'x@y.com') self.assertEqual(tagcube_cli.client.api_key, 'w') + + def test_parse_path_file_ok(self): + path_file = '/foo\n/bar' + + fh = tempfile.NamedTemporaryFile('w', delete=False) + fh.write(path_file) + fh.close() + + args = self.SIMPLE_ARGS + ['--path-file=%s' % fh.name] + parsed_args = TagCubeCLI.parse_args(args) + self.assertEqual(parsed_args.path_file, ['/foo', '/bar']) + + def test_parse_path_file_incorrect_format(self): + path_file = 'bar' + + fh = tempfile.NamedTemporaryFile('w', delete=False) + fh.write(path_file) + fh.close() + + args = self.SIMPLE_ARGS + ['--path-file=%s' % fh.name] + + with patch('argparse._sys.exit') as exit_mock,\ + patch('argparse._sys.stderr') as stderr_mock: + TagCubeCLI.parse_args(args) + self.assertEqual(exit_mock.call_count, 1) \ No newline at end of file
Parsing the path file into a list
tagcubeio_tagcube-cli
train
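Based on the parser added in the tagcube-cli commit above, a file passed via --path-file would look like this sketch: blank lines and # comments are skipped, and every remaining line must start with / and contain no spaces or tabs.

```
# paths to scan (one per line)
/
/login

# URL-encode anything that would otherwise contain a space
/static/old%20site/index.html
```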
7f59c455c9a41b4322f29b91653405a19548dc81
diff --git a/lib/netsuite/records/customer_deposit.rb b/lib/netsuite/records/customer_deposit.rb index <HASH>..<HASH> 100644 --- a/lib/netsuite/records/customer_deposit.rb +++ b/lib/netsuite/records/customer_deposit.rb @@ -20,7 +20,7 @@ module NetSuite fields :custom_form, :payment, :tran_date, :exchange_rate, :undep_funds, :memo, :check_num - record_refs :customer, :sales_order, :account + record_refs :customer, :sales_order, :account, :department attr_reader :internal_id attr_accessor :external_id
adding department record reference to customer deposit record
NetSweet_netsuite
train
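For the netsuite gem change above, a hedged sketch of setting the new reference. The internal IDs are made up, and the attribute-hash initialization style is assumed from how record refs in this gem are typically used:

```ruby
deposit = NetSuite::Records::CustomerDeposit.new(
  customer:    { internal_id: '123' },
  sales_order: { internal_id: '456' },
  department:  { internal_id: '7' },   # newly supported record ref
  payment:     100.0
)
deposit.add
```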
025505ba0761fc60bc366571a6cb7a80512a6e82
diff --git a/molgenis-data-rest/src/main/java/org/molgenis/data/rest/v2/AttributeFilter.java b/molgenis-data-rest/src/main/java/org/molgenis/data/rest/v2/AttributeFilter.java index <HASH>..<HASH> 100644 --- a/molgenis-data-rest/src/main/java/org/molgenis/data/rest/v2/AttributeFilter.java +++ b/molgenis-data-rest/src/main/java/org/molgenis/data/rest/v2/AttributeFilter.java @@ -24,8 +24,6 @@ import org.molgenis.data.Entity; */ class AttributeFilter implements Iterable<Entry<String, AttributeFilter>> { - public static final AttributeFilter ALL_ATTRS_FILTER = new AttributeFilter().setIncludeAllAttrs(true); - private final Map<String, AttributeFilter> attributes; private boolean includeAllAttrs; private boolean includeIdAttr;
Remove the unused and dangerous ALL_ATTRS_FILTER; the filters are not immutable.
molgenis_molgenis
train
31105cc472b33412ef024f11fecc3f8cca33cfd8
diff --git a/octodns/cmds/sync.py b/octodns/cmds/sync.py index <HASH>..<HASH> 100755 --- a/octodns/cmds/sync.py +++ b/octodns/cmds/sync.py @@ -25,18 +25,19 @@ def main(): parser.add_argument('zone', nargs='*', default=[], help='Limit sync to the specified zone(s)') - # --sources isn't an option here b/c filtering sources out would be super - # dangerous since you could easily end up with an empty zone and delete - # everything, or even just part of things when there are multiple sources - + parser.add_argument('--source', default=[], action='append', + help='Limit sync to zones with the specified ' + 'source(s) (all sources will be synchronized for the ' + 'selected zones)') parser.add_argument('--target', default=[], action='append', help='Limit sync to the specified target(s)') args = parser.parse_args() manager = Manager(args.config_file) - manager.sync(eligible_zones=args.zone, eligible_targets=args.target, - dry_run=not args.doit, force=args.force) + manager.sync(eligible_zones=args.zone, eligible_sources=args.source, + eligible_targets=args.target, dry_run=not args.doit, + force=args.force) if __name__ == '__main__': diff --git a/octodns/manager.py b/octodns/manager.py index <HASH>..<HASH> 100644 --- a/octodns/manager.py +++ b/octodns/manager.py @@ -255,8 +255,8 @@ class Manager(object): return plans - def sync(self, eligible_zones=[], eligible_targets=[], dry_run=True, - force=False): + def sync(self, eligible_zones=[], eligible_sources=[], eligible_targets=[], + dry_run=True, force=False): self.log.info('sync: eligible_zones=%s, eligible_targets=%s, ' 'dry_run=%s, force=%s', eligible_zones, eligible_targets, dry_run, force) @@ -280,6 +280,12 @@ class Manager(object): except KeyError: raise ManagerException('Zone {} is missing targets' .format(zone_name)) + + if (eligible_sources and not + [s for s in sources if s in eligible_sources]): + self.log.info('sync: no eligible sources, skipping') + continue + if eligible_targets: targets = [t for t in targets if t in eligible_targets] diff --git a/tests/test_octodns_manager.py b/tests/test_octodns_manager.py index <HASH>..<HASH> 100644 --- a/tests/test_octodns_manager.py +++ b/tests/test_octodns_manager.py @@ -151,6 +151,14 @@ class TestManager(TestCase): .sync(dry_run=False, force=True) self.assertEquals(25, tc) + def test_eligible_sources(self): + with TemporaryDirectory() as tmpdir: + environ['YAML_TMP_DIR'] = tmpdir.dirname + # Only allow a target that doesn't exist + tc = Manager(get_config_filename('simple.yaml')) \ + .sync(eligible_sources=['foo']) + self.assertEquals(0, tc) + def test_eligible_targets(self): with TemporaryDirectory() as tmpdir: environ['YAML_TMP_DIR'] = tmpdir.dirname
Implement octodns-sync --source

It can be useful to only synchronize zones that use a certain source. For example, in a situation where some zones use a dynamic source and others don't, you probably want to synchronize those with a dynamic source regularly, and only synchronize the others when a change is made.

Although we only synchronize the zones that use a given source, we still want to synchronize all sources to avoid deleting records that would live in another source of the zone.
github_octodns
train
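Illustrative invocations of the new flag under the semantics described above; the config file name is hypothetical and the other flag names are assumed from the surrounding code:

```sh
# Plan changes only for zones that list the "dynamic" source; all sources
# of those zones are still read, so nothing is accidentally deleted.
octodns-sync --config-file=config.yaml --source=dynamic

# Same selection, but actually apply the changes.
octodns-sync --config-file=config.yaml --source=dynamic --doit
```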
88526103a2004bc28b20a22def52ac739572f87f
diff --git a/README.md b/README.md index <HASH>..<HASH> 100644 --- a/README.md +++ b/README.md @@ -93,3 +93,25 @@ offset integer: ```php echo $skeVent->time(null, -5); // outputs the time adjusted for US Eastern Standard time ``` + +## Basic-er Usage + +For an instant, non-customized event form, use the magical `skeDoosh()` method. +The only required configuration is connection parameters for your data store. +Call `skeDoosh()` from the exact spot in your code where you want the HTML form +to be displayed: + +```php +Sked::skeDoosh([ + 'data_connector' => [ + 'name' => 'PDO', + 'options' => [ + 'driver' => 'mysql', + 'host' => 'localhost', + 'dbname' => 'homestead', + 'user' => 'homestead', + 'pass' => 'secret', + ], + ], +]); +``` diff --git a/src/SkeForm.php b/src/SkeForm.php index <HASH>..<HASH> 100644 --- a/src/SkeForm.php +++ b/src/SkeForm.php @@ -87,6 +87,8 @@ class SkeForm { { if (!isset($aAttribs['method'])) $aAttribs['method'] = 'POST'; + $aAttribs['class'] = isset($aAttribs['class']) + ? $aAttribs['class'] . ' sked-form' : 'sked-form'; $this->aAttribs = $aAttribs; return $this; } @@ -157,7 +159,7 @@ class SkeForm { 'type' => 'select', 'options' => range(1, 31), 'attribs' => [ - 'label' => 'Repeate every', + 'label' => 'Repeat every', // 'disabled' => true, ], ], diff --git a/src/Sked.php b/src/Sked.php index <HASH>..<HASH> 100644 --- a/src/Sked.php +++ b/src/Sked.php @@ -86,6 +86,36 @@ class Sked { */ public static function skeDoosh(array $aOptions) { + // Load JS + echo <<<EOD +<script> + if (typeof jQuery == 'undefined') { + skedLoadTag( + 'http://ajax.googleapis.com/ajax/libs/jquery/2.2.0/jquery.min.js', + loadSkedJs + ); + } else { + loadSkedJs(); + } + + function skedLoadTag(strSrc, fnOnload) + { + var eHead = document.getElementsByTagName('head')[0]; + var e$ = document.createElement('script'); + e$.src = strSrc; + if (fnOnload) + e$.onload = fnOnload; + eHead.appendChild(e$); + } + + function loadSkedJs() + { + skedLoadTag('https://raw.githubusercontent.com/CampusUnion/Sked-JS/master/sked.js'); + } +</script> +EOD; + + // Init Sked $sked = new self($aOptions); $skeVent = null; if ($_REQUEST['sked_form'] ?? null === '1') {
Progress on skeDoosh
CampusUnion_Sked
train
fd1b831777265b9cf7a4caa97a4175c1738ccb12
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -32,11 +32,11 @@ for(var i=2; i<process.argv.length;i++){ global.basePath = path.join(process.cwd(),process.argv[i+1]); } }else if(key == '-v' || key == '--verbose'){ - key = '-v'; require('./os/nushi/stubbydb/log').setVerbose(true); + continue; }else if(key == '-l' || key == '--logs'){ - key = '-l'; require('./os/nushi/stubbydb/log').writeLogs(true); + continue; }
Fixing a bug: the argument following the verbose or logs option was being ignored.
NaturalIntelligence_StubbyDB
train
1020b64d907fb5083adfe838df9c54720d16c59b
diff --git a/src/Typeahead.react.js b/src/Typeahead.react.js index <HASH>..<HASH> 100644 --- a/src/Typeahead.react.js +++ b/src/Typeahead.react.js @@ -222,8 +222,10 @@ const Typeahead = React.createClass({ this._hideDropdown(); break; case RETURN: - let selected = options[activeIndex]; - selected && this._handleAddOption(selected); + if (this.state.showMenu) { + let selected = options[activeIndex]; + selected && this._handleAddOption(selected); + } break; } },
Don't allow selections while menu is closed
ericgio_react-bootstrap-typeahead
train
a4f7bf8ab238a6b2be93528820e4b100cc001cb8
diff --git a/Tests/Stub/StubLocaleTest.php b/Tests/Stub/StubLocaleTest.php index <HASH>..<HASH> 100644 --- a/Tests/Stub/StubLocaleTest.php +++ b/Tests/Stub/StubLocaleTest.php @@ -11,8 +11,8 @@ namespace Symfony\Component\Locale\Tests\Stub; -use Symfony\Component\Intl\Intl; use Symfony\Component\Intl\Util\IcuVersion; +use Symfony\Component\Intl\Util\IntlTestHelper; use Symfony\Component\Locale\Stub\StubLocale; /** @@ -22,8 +22,7 @@ class StubLocaleTest extends \PHPUnit_Framework_TestCase { protected function setUp() { - // Force the use of stub data to have consistent results - Intl::setDataSource(Intl::STUB); + IntlTestHelper::requireIntl($this); parent::setUp(); }
[Intl] Moved stub data to Icu component <I>.x
symfony_locale
train
c6ebb5a1c8f54318783f37bc915ede393fc358ac
diff --git a/calendar-bundle/src/Resources/contao/Events.php b/calendar-bundle/src/Resources/contao/Events.php index <HASH>..<HASH> 100644 --- a/calendar-bundle/src/Resources/contao/Events.php +++ b/calendar-bundle/src/Resources/contao/Events.php @@ -231,6 +231,7 @@ abstract class Events extends Module $span = floor(($intEnd - $intStart + $ds) / 86400); $strDate = date($GLOBALS['TL_CONFIG']['dateFormat'], $intStart); $strDay = $GLOBALS['TL_LANG']['DAYS'][date('w', $intStart)]; + $strMonth = $GLOBALS['TL_LANG']['MONTHS'][(date('n', $intStart)-1)]; if ($span > 0) { @@ -262,6 +263,7 @@ abstract class Events extends Module $arrEvent['time'] = $strTime; $arrEvent['date'] = $strDate; $arrEvent['day'] = $strDay; + $arrEvent['month'] = $strMonth; $arrEvent['parent'] = $intCalendar; $arrEvent['link'] = $objEvents->title; $arrEvent['title'] = specialchars($objEvents->title); diff --git a/calendar-bundle/src/Resources/contao/ModuleEventlist.php b/calendar-bundle/src/Resources/contao/ModuleEventlist.php index <HASH>..<HASH> 100644 --- a/calendar-bundle/src/Resources/contao/ModuleEventlist.php +++ b/calendar-bundle/src/Resources/contao/ModuleEventlist.php @@ -219,6 +219,7 @@ class ModuleEventlist extends Events $objTemplate->title = $event['title']; $objTemplate->time = $event['time']; $objTemplate->link = $event['href']; + $objTemplate->month = $event['month']; $objTemplate->teaser = $event['teaser']; $objTemplate->details = $event['details']; $objTemplate->calendar = $event['calendar']; diff --git a/calendar-bundle/src/Resources/contao/ModuleUpcomingEvents.php b/calendar-bundle/src/Resources/contao/ModuleUpcomingEvents.php index <HASH>..<HASH> 100644 --- a/calendar-bundle/src/Resources/contao/ModuleUpcomingEvents.php +++ b/calendar-bundle/src/Resources/contao/ModuleUpcomingEvents.php @@ -140,6 +140,7 @@ class ModuleUpcomingEvents extends Events $objTemplate->date = date($GLOBALS['TL_CONFIG']['dateFormat'], $day); $objTemplate->time = $event['time']; $objTemplate->link = $event['href']; + $objTemplate->month = $event['month']; $objTemplate->teaser = $event['teaser']; $objTemplate->details = $event['details']; $objTemplate->calendar = $event['calendar'];
[Calendar] Completed tickets #<I>, #<I>, #<I> and #<I>
contao_contao
train
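For the Contao calendar change above, a sketch of how the new month token could be used in an event template; the markup is illustrative, not taken from the core templates:

```php
<div class="event">
  <?php echo $this->day; ?>, <?php echo $this->date; ?>
  <span class="month"><?php echo $this->month; ?></span> <!-- token added by this commit -->
</div>
```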
cad1325a90a4ce5d56915467bf1fe52f38814b5a
diff --git a/uncompyle6/parsers/parse22.py b/uncompyle6/parsers/parse22.py index <HASH>..<HASH> 100644 --- a/uncompyle6/parsers/parse22.py +++ b/uncompyle6/parsers/parse22.py @@ -18,6 +18,10 @@ class Python22Parser(Python23Parser): COME_FROM POP_TOP COME_FROM list_for ::= expr for_iter store list_iter CONTINUE JUMP_FORWARD COME_FROM POP_TOP COME_FROM + + # Some versions of Python 2.2 have been found to generate + # PRINT_ITEM_CONT for PRINT_ITEM + print_items_stmt ::= expr PRINT_ITEM_CONT print_items_opt ''' def customize_grammar_rules(self, tokens, customize):
Python <I> code anomaly?

Python <I> may generate PRINT_ITEM_CONT in some places instead of PRINT_ITEM
rocky_python-uncompyle6
train
8eceff996190525d845fe67b5c5a38dd8dc4a968
diff --git a/phraseapp/auth.go b/phraseapp/auth.go index <HASH>..<HASH> 100644 --- a/phraseapp/auth.go +++ b/phraseapp/auth.go @@ -14,7 +14,7 @@ type AuthCredentials struct { Username string `cli:"opt --username desc='username used for authentication'"` Token string `cli:"opt --token desc='token used for authentication'"` TFA bool `cli:"opt --tfa desc='use Two-Factor Authentication'"` - Host string `cli:"opt --host default='https://api.phraseapp.com' desc='Host to send Request to'"` + Host string `cli:"opt --host desc='Host to send Request to'"` } var authC *AuthCredentials @@ -34,6 +34,14 @@ func RegisterAuthCredentials(cmdAuth *AuthCredentials, defaultCredentials *AuthC if cmdAuth.Host != "" { authC.Host = cmdAuth.Host + } else { + if defaultCredentials.Host != "" { + authC.Host = defaultCredentials.Host + } + } + + if authC.Host == "" { + authC.Host = "https://api.phraseapp.com" } notSet := authC.Token == "" && authC.Username == ""
Fix the default behavior for the Host setting
phrase_phraseapp-go
train
6fb4097fc313739bdb529193453bafa4a187f625
diff --git a/lxd/container_lxc.go b/lxd/container_lxc.go index <HASH>..<HASH> 100644 --- a/lxd/container_lxc.go +++ b/lxd/container_lxc.go @@ -2374,8 +2374,8 @@ func (c *containerLXC) startCommon() (string, error) { return "", err } - // Attempt to disable IPv6 on the host side interface - networkSysctlSet(fmt.Sprintf("ipv6/conf/%s/disable_ipv6", device), "1") + // Attempt to disable IPv6 router advertisement acceptance + networkSysctlSet(fmt.Sprintf("ipv6/conf/%s/accept_ra", device), "0") } } } @@ -7512,8 +7512,8 @@ func (c *containerLXC) createNetworkDevice(name string, m types.Device) (string, return "", fmt.Errorf("Failed to add interface to bridge: %s", err) } - // Attempt to disable IPv6 on the host side interface - networkSysctlSet(fmt.Sprintf("ipv6/conf/%s/disable_ipv6", n1), "1") + // Attempt to disable router advertisement acceptance + networkSysctlSet(fmt.Sprintf("ipv6/conf/%s/accept_ra", n1), "0") } dev = n2 @@ -7531,8 +7531,8 @@ func (c *containerLXC) createNetworkDevice(name string, m types.Device) (string, return "", err } - // Attempt to disable IPv6 on the host side interface - networkSysctlSet(fmt.Sprintf("ipv6/conf/%s/disable_ipv6", device), "1") + // Attempt to disable IPv6 router advertisement acceptance + networkSysctlSet(fmt.Sprintf("ipv6/conf/%s/accept_ra", device), "0") } }
lxd/container: Changes disable_ipv6=1 to accept_ra=0 on host side interface

This allows host-side static IPv6 routes to be added.
lxc_lxd
train
f4cab0fc43bc88c120d67489f7d195828f33e198
diff --git a/angr/analyses/cfg.py b/angr/analyses/cfg.py index <HASH>..<HASH> 100644 --- a/angr/analyses/cfg.py +++ b/angr/analyses/cfg.py @@ -775,30 +775,43 @@ class CFG(Analysis, CFGBase): jumpkind = 'Ijk_Boring' if jumpkind is None else jumpkind sim_run = self.project.factory.sim_run(current_entry.state, jumpkind=jumpkind) - except (simuvex.SimFastPathError, simuvex.SimSolverModeError): - # Got a SimFastPathError. We wanna switch to symbolic mode for current IRSB. - l.debug('Switch to symbolic mode for address 0x%x', addr) - # Make a copy of the current 'fastpath' state - - l.debug('Symbolic jumps at basic block 0x%x.' % addr) - - new_state = None - if addr != current_function_addr: - new_state = self._get_symbolic_function_initial_state(current_function_addr) - - if new_state is None: - new_state = current_entry.state.copy() - new_state.set_mode('symbolic') - new_state.options.add(simuvex.o.DO_RET_EMULATION) - # Remove bad constraints - # FIXME: This is so hackish... - new_state.se._solver.constraints = [c for c in new_state.se.constraints if - c.op != 'I' or c.args[0] is not False] - new_state.se._solver._result = None - # Swap them - saved_state, current_entry.state = current_entry.state, new_state - sim_run, error_occurred, _ = self._get_simrun(addr, current_entry) - except simuvex.SimIRSBError as ex: + except (simuvex.SimFastPathError, simuvex.SimSolverModeError) as ex: + + if saved_state.mode == 'fastpath': + # Got a SimFastPathError or SimSolverModeError in FastPath mode. + # We wanna switch to symbolic mode for current IRSB. + l.debug('Switch to symbolic mode for address 0x%x', addr) + # Make a copy of the current 'fastpath' state + + l.debug('Symbolic jumps at basic block 0x%x.' % addr) + + new_state = None + if addr != current_function_addr: + new_state = self._get_symbolic_function_initial_state(current_function_addr) + + if new_state is None: + new_state = current_entry.state.copy() + new_state.set_mode('symbolic') + new_state.options.add(simuvex.o.DO_RET_EMULATION) + # Remove bad constraints + # FIXME: This is so hackish... + new_state.se._solver.constraints = [c for c in new_state.se.constraints if + c.op != 'I' or c.args[0] is not False] + new_state.se._solver._result = None + # Swap them + saved_state, current_entry.state = current_entry.state, new_state + sim_run, error_occurred, _ = self._get_simrun(addr, current_entry) + + else: + # Got a SimSolverModeError in symbolic mode. We are screwed. + # Skip this IRSB + l.debug("Caught a SimIRSBError. Don't panic, this is usually expected.", ex) + error_occurred = True + sim_run = \ + simuvex.procedures.SimProcedures["stubs"]["PathTerminator"]( + state, addr=addr) + + except simuvex.SimIRSBError: # It's a tragedy that we came across some instructions that VEX # does not support. I'll create a terminating stub there l.error("SimIRSBError occurred(%s). Creating a PathTerminator.", ex)
Fix a potential infinite loop in CFG recovery.

Before this fix, after switching from fastpath mode to symbolic mode in CFG._get_simrun(), it would stay in symbolic mode and try to build the SimRun forever if claripy raised an exception.
angr_angr
train
f143735969122833e8dbaccae7e89d362f67f66d
diff --git a/tornadose/handlers.py b/tornadose/handlers.py index <HASH>..<HASH> 100644 --- a/tornadose/handlers.py +++ b/tornadose/handlers.py @@ -50,8 +50,8 @@ class EventSource(BaseHandler): * It is a normal HTTP connection and so can be more easily monitored than websockets using tools like curl__ or HTTPie__. - * Browsers generally automatically try to reestablish a lost - connection. + * Browsers generally try to reestablish a lost connection + automatically. * The publish/subscribe pattern is better suited to some applications than the full duplex model of websockets.
Reword point on browsers reconnecting to SSE
mivade_tornadose
train
ac7e3c6b5c13c18b31c8f779b77ef9219769c7ce
diff --git a/eq3bt/connection.py b/eq3bt/connection.py index <HASH>..<HASH> 100644 --- a/eq3bt/connection.py +++ b/eq3bt/connection.py @@ -15,12 +15,13 @@ _LOGGER = logging.getLogger(__name__) class BTLEConnection(btle.DefaultDelegate): """Representation of a BTLE Connection.""" - def __init__(self, mac): + def __init__(self, mac, iface): """Initialize the connection.""" btle.DefaultDelegate.__init__(self) self._conn = None self._mac = mac + self._iface = iface self._callbacks = {} def __enter__(self): @@ -33,11 +34,11 @@ class BTLEConnection(btle.DefaultDelegate): self._conn.withDelegate(self) _LOGGER.debug("Trying to connect to %s", self._mac) try: - self._conn.connect(self._mac) + self._conn.connect(self._mac, iface=self._iface) except btle.BTLEException as ex: _LOGGER.debug("Unable to connect to the device %s, retrying: %s", self._mac, ex) try: - self._conn.connect(self._mac) + self._conn.connect(self._mac, iface=self._iface) except Exception as ex2: _LOGGER.debug("Second connection try to %s failed: %s", self._mac, ex2) raise @@ -77,3 +78,4 @@ class BTLEConnection(btle.DefaultDelegate): except btle.BTLEException as ex: _LOGGER.debug("Got exception from bluepy while making a request: %s", ex) raise + diff --git a/eq3bt/eq3btsmart.py b/eq3bt/eq3btsmart.py index <HASH>..<HASH> 100644 --- a/eq3bt/eq3btsmart.py +++ b/eq3bt/eq3btsmart.py @@ -71,7 +71,7 @@ class TemperatureException(Exception): class Thermostat: """Representation of a EQ3 Bluetooth Smart thermostat.""" - def __init__(self, _mac, connection_cls=BTLEConnection): + def __init__(self, _mac, _iface=None, connection_cls=BTLEConnection): """Initialize the thermostat.""" self._target_temperature = Mode.Unknown @@ -94,7 +94,7 @@ class Thermostat: self._firmware_version = None self._device_serial = None - self._conn = connection_cls(_mac) + self._conn = connection_cls(_mac, _iface) self._conn.set_callback(PROP_NTFY_HANDLE, self.handle_notification) def __str__(self): @@ -464,3 +464,4 @@ class Thermostat: def device_serial(self): """Return the device serial number.""" return self._device_serial + diff --git a/eq3bt/eq3cli.py b/eq3bt/eq3cli.py index <HASH>..<HASH> 100644 --- a/eq3bt/eq3cli.py +++ b/eq3bt/eq3cli.py @@ -21,16 +21,17 @@ def validate_mac(ctx, param, mac): @click.group(invoke_without_command=True) @click.option('--mac', envvar="EQ3_MAC", required=True, callback=validate_mac) +@click.option('--interface', default=None) @click.option('--debug/--normal', default=False) @click.pass_context -def cli(ctx, mac, debug): +def cli(ctx, mac, interface, debug): """ Tool to query and modify the state of EQ3 BT smart thermostat. """ if debug: logging.basicConfig(level=logging.DEBUG) else: logging.basicConfig(level=logging.INFO) - thermostat = Thermostat(mac) + thermostat = Thermostat(mac, interface) thermostat.update() ctx.obj = thermostat
Add bt interface selection (#<I>)

* Add bt interface selection
* Update eq3cli.py: fix interface selection in cli
rytilahti_python-eq3bt
train
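A minimal sketch of the new parameter from the python-eq3bt commit above, assuming "hci1" is a valid Bluetooth adapter name on the host; passing None (or omitting the argument) keeps the previous behavior:

```python
from eq3bt import Thermostat

thermostat = Thermostat('00:11:22:33:44:55', 'hci1')  # mac, interface
thermostat.update()
print(thermostat.target_temperature)
```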
fcd6bce775a93a5ed56ddd0bde81d463248d55ee
diff --git a/src/apicache.js b/src/apicache.js index <HASH>..<HASH> 100644 --- a/src/apicache.js +++ b/src/apicache.js @@ -414,7 +414,7 @@ function ApiCache() { if (redis) { try { redis.hgetall(key, function (err, obj) { - if (!err && obj) { + if (!err && obj && obj.response) { var elapsed = new Date() - req.apicacheTimer debug('sending cached (redis) version of', key, logDuration(elapsed))
Add support for ioredis
kwhitley_apicache
train
0abd0c5e6591a9bcb1ecf59b22f0921451f3039c
diff --git a/src/bbn/appui/cron.php b/src/bbn/appui/cron.php index <HASH>..<HASH> 100644 --- a/src/bbn/appui/cron.php +++ b/src/bbn/appui/cron.php @@ -160,8 +160,8 @@ class cron extends \bbn\obj{ FROM {$this->table} WHERE active = 1 AND next < ?". - ( is_int($id_cron) ? " AND id_cron = $id_cron" : "" )." - ORDER BY next ASC + ( is_int($id_cron) ? " AND `id_cron` = $id_cron" : "" )." + ORDER BY `priority` ASC, `next` ASC LIMIT 1", date('Y-m-d H:i:s'))) ){ // Dans cfg: timeout, et soit: latency, minute, hour, day of month, day of week, date @@ -192,13 +192,7 @@ class cron extends \bbn\obj{ $d['cfg'] = json_decode($d['cfg'], 1); } } - - private function get_latency($id_journal){ - if ( $this->check() && is_int($id_journal) ){ - - } - } - + public function run($id_cron = null){ if ( ($cron = $this->get_next($id_cron)) ){ $ok = 1; @@ -207,7 +201,6 @@ class cron extends \bbn\obj{ $start = strtotime($runner['start']); $timeout = $runner['cfg']['timeout']; if ( ($start + $timeout) > time() ){ - $this->alert(); } $ok = false; @@ -217,11 +210,20 @@ class cron extends \bbn\obj{ $output = $this->_exec($cron['file'], $cron['cfg']); $time = $this->finish($id, $output); \bbn\tools::dump("Execution of ".$cron['file']." (Journal ID: $id) in $time secs", $output); - return 1; + return $time; } } + return false; } - + + public function run_all(){ + $time = 0; + while ( ($time < $this->timeout) && ($ctx = $this->run()) ){ + $time += $ctx; + } + return $time; + } + private function _exec($file, $data=[]){ $this->mvc->data = $data; $this->obj = new \stdClass();
Added run_all in appui\cron
nabab_bbn
train
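For the bbn cron commit above, a hedged sketch of the new helper; the constructor arguments are assumptions, since they are not shown in this diff:

```php
<?php
// run_all() keeps executing due tasks until the cumulative execution time
// reaches the instance timeout, then returns the total time spent.
$cron = new \bbn\appui\cron($db, $mvc); // hypothetical constructor args
$elapsed = $cron->run_all();
echo "Executed pending cron tasks in {$elapsed}s";
```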
e91d5d7121394007c4133c551c859f311a70b44f
diff --git a/dispatch/static/manager/src/js/components/ColumnEditor/ColumnForm.js b/dispatch/static/manager/src/js/components/ColumnEditor/ColumnForm.js index <HASH>..<HASH> 100644 --- a/dispatch/static/manager/src/js/components/ColumnEditor/ColumnForm.js +++ b/dispatch/static/manager/src/js/components/ColumnEditor/ColumnForm.js @@ -73,6 +73,7 @@ export default function ColumnForm(props) { error={props.errors.article_ids}> <ArticleSelectInput selected={articles} + section={props.listItem.section ? props.listItem.section.id : null} many={true} onChange={(articles) => updateArticles(articles)} /> </FormInput> diff --git a/dispatch/static/manager/src/js/components/inputs/selects/ArticleSelectInput.js b/dispatch/static/manager/src/js/components/inputs/selects/ArticleSelectInput.js index <HASH>..<HASH> 100644 --- a/dispatch/static/manager/src/js/components/inputs/selects/ArticleSelectInput.js +++ b/dispatch/static/manager/src/js/components/inputs/selects/ArticleSelectInput.js @@ -14,6 +14,10 @@ class ArticleSelectInputComponent extends React.Component { queryObj['q'] = query } + if (this.props.section) { + queryObj['section'] = this.props.section + } + this.props.listArticles(this.props.token, queryObj) }
Limit the article select input in the columns form to only show articles from the same section as the column
ubyssey_dispatch
train
1f6b121d3c7b4e2deb694f02499e3c9a817a84a5
diff --git a/ext/rest/src/main/java/org/minimalj/rest/RestServer.java b/ext/rest/src/main/java/org/minimalj/rest/RestServer.java index <HASH>..<HASH> 100644 --- a/ext/rest/src/main/java/org/minimalj/rest/RestServer.java +++ b/ext/rest/src/main/java/org/minimalj/rest/RestServer.java @@ -57,21 +57,4 @@ public class RestServer { start(); } - /** - * To use inner classes as main class you have to use - * <pre> - * java org.minimalj.rest.RestServer$WithFrontend - * </pre> - * - * Note the $ instead of . - */ - public static class WithFrontend { - - public static void main(String[] args) { - Application.initApplication(args); - NanoWebServer.start(); - RestServer.start(); - } - } - } \ No newline at end of file
It's now allowed to start two frontends with the same application, like this:

    XyApplication application = new XyApplication();
    NanoWebServer.start(application);
    RestServer.start(application);

WithFrontend is not needed anymore.
BrunoEberhard_minimal-j
train
75e39df00d50e828136daa19ea7287e4f43539cd
diff --git a/graylog2-server/src/main/java/org/graylog2/periodical/PurgeExpiredAgentsThread.java b/graylog2-server/src/main/java/org/graylog2/periodical/PurgeExpiredAgentsThread.java index <HASH>..<HASH> 100644 --- a/graylog2-server/src/main/java/org/graylog2/periodical/PurgeExpiredAgentsThread.java +++ b/graylog2-server/src/main/java/org/graylog2/periodical/PurgeExpiredAgentsThread.java @@ -81,6 +81,7 @@ public class PurgeExpiredAgentsThread extends Periodical { @Override public void doRun() { final Duration threshold = configuration.getAgentExpirationThreshold(); - agentService.destroyExpired(Ints.checkedCast(threshold.getQuantity()), threshold.getUnit()); + final int purgedAgents = agentService.destroyExpired(Ints.checkedCast(threshold.getQuantity()), threshold.getUnit()); + LOG.debug("Purged {} inactive agents.", purgedAgents); } }
Adding debug message logging how many inactive agents were purged.
Graylog2_graylog2-server
train
663404074ff1e4e5bcc7258279168136d7eabe46
diff --git a/lib/s3direct/upload_request.rb b/lib/s3direct/upload_request.rb index <HASH>..<HASH> 100644 --- a/lib/s3direct/upload_request.rb +++ b/lib/s3direct/upload_request.rb @@ -85,7 +85,7 @@ module S3Direct # sign our request by Base64 encoding the policy document. def s3_upload_signature signature = OpenSSL::HMAC.digest( - OpenSSL::Digest::Digest.new('sha1'), + OpenSSL::Digest.new('sha1'), config.secret_key, s3_upload_policy_document )
Remove deprecated use of OpenSSL::Digest::Digest
Kajabi_s3direct
train
71c419513d1b07677ad33f12a7f71847a54245da
diff --git a/cleverhans/attacks.py b/cleverhans/attacks.py index <HASH>..<HASH> 100644 --- a/cleverhans/attacks.py +++ b/cleverhans/attacks.py @@ -514,7 +514,7 @@ class SaliencyMapMethod(Attack): theta=self.theta, gamma=self.gamma, clip_min=self.clip_min, clip_max=self.clip_max) - def parse_params(self, theta=1., gamma=np.inf, nb_classes=None, + def parse_params(self, theta=1., gamma=1., nb_classes=None, clip_min=0., clip_max=1., y_target=None, **kwargs): """ Take in a dictionary of parameters and applies attack-specific checks
Change the default value of gamma for JSMA from np.inf to 1.
tensorflow_cleverhans
train
fa658c43d344f52cd3d6d3a7f1e9d78571c806cf
diff --git a/lib/validate.js b/lib/validate.js index <HASH>..<HASH> 100644 --- a/lib/validate.js +++ b/lib/validate.js @@ -4,6 +4,9 @@ var path = require('path'); var deepFreeze = require('deep-freeze'); +var RE_LAST_URL_SECTION = /([^\/]+)\/?$/i; +var RE_QUERY = /\?.*$/; + function formatTraceData(output, data){ if (data) { // as validators may deal with freezed data, lets copy out input. @@ -25,6 +28,18 @@ function formatTraceData(output, data){ if (entry.file.length > 25) { entry.file = entry.file.substring(0, 25); } + + if (!entry.file && entry.sourceURL){ + var match = entry.sourceURL.replace(RE_QUERY, '').match(RE_LAST_URL_SECTION); + if (match && match[1]){ + entry.file = match[1]; + } + + if (!entry.file){ + entry.file = path.dirname(entry.sourceURL); + } + } + return true; }
add better fallbacks for filename on traces
gardr_validator
train
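The fallback in the gardr validator commit above can be traced with the two regexes from the diff; a quick sketch of the intended behavior, with a made-up URL:

```js
var RE_LAST_URL_SECTION = /([^\/]+)\/?$/i;
var RE_QUERY = /\?.*$/;

// Strip the query string, then grab the last path segment.
var sourceURL = 'http://cdn.example.com/ads/banner.js?v=2';
var match = sourceURL.replace(RE_QUERY, '').match(RE_LAST_URL_SECTION);
console.log(match && match[1]); // "banner.js"
```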
b9b183942e29aee195755c49a76b19c0a70ef7a3
diff --git a/server/src/test/java/org/cloudfoundry/identity/uaa/authorization/external/LdapGroupMappingAuthorizationManagerTests.java b/server/src/test/java/org/cloudfoundry/identity/uaa/authorization/external/LdapGroupMappingAuthorizationManagerTests.java index <HASH>..<HASH> 100644 --- a/server/src/test/java/org/cloudfoundry/identity/uaa/authorization/external/LdapGroupMappingAuthorizationManagerTests.java +++ b/server/src/test/java/org/cloudfoundry/identity/uaa/authorization/external/LdapGroupMappingAuthorizationManagerTests.java @@ -84,7 +84,6 @@ public class LdapGroupMappingAuthorizationManagerTests extends JdbcTestBase { Map<String, Map<String, List>> originMap = new HashMap<>(); Map<String, List> externalGroupMap = new HashMap<>(); - externalGroupMap.put("cn=Engineering,ou=groups,dc=example,dc=com", Collections.singletonList("acme")); externalGroupMap.put("cn=HR,ou=groups,dc=example,dc=com", Collections.singletonList("acme")); externalGroupMap.put("cn=mgmt,ou=groups,dc=example,dc=com", Collections.singletonList("acme")); externalGroupMap.put("cn=Engineering,ou=groups,dc=example,dc=com", Collections.singletonList("acme.dev"));
Remove line that does nothing

This element gets overwritten three lines below [#<I>]
cloudfoundry_uaa
train
215dea449df859097aad98c9aed7d4ca04c1bbce
diff --git a/hazelcast/src/main/java/com/hazelcast/client/impl/spi/impl/TranslateToPublicAddressProvider.java b/hazelcast/src/main/java/com/hazelcast/client/impl/spi/impl/TranslateToPublicAddressProvider.java index <HASH>..<HASH> 100644 --- a/hazelcast/src/main/java/com/hazelcast/client/impl/spi/impl/TranslateToPublicAddressProvider.java +++ b/hazelcast/src/main/java/com/hazelcast/client/impl/spi/impl/TranslateToPublicAddressProvider.java @@ -73,6 +73,9 @@ class TranslateToPublicAddressProvider { String publicIpEnabledProperty = properties.getString(ClientProperty.DISCOVERY_SPI_PUBLIC_IP_ENABLED); if (publicIpEnabledProperty == null) { SSLConfig sslConfig = config.getSSLConfig(); + // When ssl is enabled, we don't want to check if addresses are accessible and return false with a log. + // Because when client tries to check if addresses are reachable, because of SSL handshakes the members prints + // too many warnings which will alarm the users even if the behaviour is expected. if (sslConfig != null && sslConfig.isEnabled()) { if (logger.isFineEnabled()) { logger.fine("SSL is configured. The client will use internal addresses to communicate with the cluster. If "
doc: explain why ssl needs explicit config for public address (#<I>)

* Update hazelcast/src/main/java/com/hazelcast/client/impl/spi/impl/TranslateToPublicAddressProvider.java
hazelcast_hazelcast
train
7f944c260bb92d27411ab0c8b1fa0d9ce2227544
diff --git a/newsletter-bundle/contao/Newsletter.php b/newsletter-bundle/contao/Newsletter.php index <HASH>..<HASH> 100644 --- a/newsletter-bundle/contao/Newsletter.php +++ b/newsletter-bundle/contao/Newsletter.php @@ -616,9 +616,10 @@ class Newsletter extends Backend * Synchronize newsletter subscription of existing users * @param mixed * @param object + * @param object * @return mixed */ - public function synchronize($varValue, $objUser) + public function synchronize($varValue, $objUser, $objModule=null) { // Return if there is no user (e.g. upon registration) if (is_null($objUser)) @@ -652,9 +653,15 @@ class Newsletter extends Backend $time = time(); $varValue = deserialize($varValue, true); - // Get all channel IDs - $objChannel = $this->Database->execute("SELECT id FROM tl_newsletter_channel"); - $arrChannel = $objChannel->fetchEach('id'); + // Get all channel IDs (thanks to Andreas Schempp) + if ($blnIsFrontend && $objModule instanceof Module) + { + $arrChannel = deserialize($objModule->newsletters); + } + else + { + $arrChannel = $this->Database->execute("SELECT id FROM tl_newsletter_channel")->fetchEach('id'); + } $arrDelete = array_values(array_diff($arrChannel, $varValue));
[Newsletter] Fixed the issue with lost newsletter subscriptions (#<I>)
contao_contao
train
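The deletion set computed at the end of the hunk is a plain set difference between all channel IDs and the user's selection; a small Python restatement of just that step, with illustrative names rather than Contao code:

def channels_to_unsubscribe(channel_ids, selected_ids):
    # Equivalent of array_values(array_diff($arrChannel, $varValue)):
    # every channel the user no longer selects is scheduled for removal.
    selected = set(selected_ids)
    return [cid for cid in channel_ids if cid not in selected]

print(channels_to_unsubscribe([1, 2, 3, 4], [2, 4]))  # [1, 3]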
1413acd3e0beeb1d1151491e15d9684aff4b5725
diff --git a/lib/tabletojson.js b/lib/tabletojson.js index <HASH>..<HASH> 100644 --- a/lib/tabletojson.js +++ b/lib/tabletojson.js @@ -123,6 +123,9 @@ exports.convertUrl = function(url, arg1, arg2) { function fetchUrl(url, callback) { var deferred = Q.defer(); request(url, function (error, response, body) { + if(error){ + deferred.reject(error); + } deferred.resolve(body); }); return deferred.promise;
Added a rejection on the request promise so that errors can be caught.
maugenst_tabletojson
train
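The underlying pattern is bridging a callback API into a promise and rejecting on error rather than resolving with an empty body; a hedged asyncio sketch of the same idea, where the callback-style request function is hypothetical:

import asyncio

def fetch_url(url, request, loop):
    # 'request' is a hypothetical callback-style fetcher with the shape
    # request(url, callback(error, body)); 'error' is assumed to be an
    # Exception instance or None.
    future = loop.create_future()

    def on_response(error, body):
        if error:
            # The added guard: reject instead of resolving with no body.
            future.set_exception(error)
        else:
            future.set_result(body)

    request(url, on_response)
    return future

Unlike Q, where a resolve after a reject is silently ignored, an asyncio future raises InvalidStateError if set twice, hence the explicit else branch.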
eac03e16e7213e2ddc7e73818e91a0f21e571bda
diff --git a/src/Composer/Command/CreateProjectCommand.php b/src/Composer/Command/CreateProjectCommand.php index <HASH>..<HASH> 100644 --- a/src/Composer/Command/CreateProjectCommand.php +++ b/src/Composer/Command/CreateProjectCommand.php @@ -288,13 +288,13 @@ EOT } $composer = Factory::create($io, $config->all(), $disablePlugins); - $eventDispatcher = $composer->getEventDispatcher(); + $config = $composer->getConfig(); + $rm = $composer->getRepositoryManager(); if (null === $repository) { - $rm = RepositoryFactory::manager($io, $config, $eventDispatcher, Factory::createRemoteFilesystem($io, $config)); $sourceRepo = new CompositeRepository(RepositoryFactory::defaultRepos($io, $config, $rm)); } else { - $sourceRepo = RepositoryFactory::fromString($io, $config, $repository, true); + $sourceRepo = RepositoryFactory::fromString($io, $config, $repository, true, $rm); } $parser = new VersionParser(); @@ -389,13 +389,13 @@ EOT $package = $package->getAliasOf(); } - $dm = $this->createDownloadManager($io, $config, $eventDispatcher); + $dm = $composer->getDownloadManager(); $dm->setPreferSource($preferSource) ->setPreferDist($preferDist) ->setOutputProgress(!$noProgress); $projectInstaller = new ProjectInstaller($directory, $dm); - $im = $this->createInstallationManager(); + $im = $composer->getInstallationManager(); $im->addInstaller($projectInstaller); $im->install(new InstalledFilesystemRepository(new JsonFile('php://memory')), new InstallOperation($package)); $im->notifyInstalls($io); @@ -413,16 +413,4 @@ EOT return $installedFromVcs; } - - protected function createDownloadManager(IOInterface $io, Config $config, EventDispatcher $eventDispatcher) - { - $factory = new Factory(); - - return $factory->createDownloadManager($io, $config, $eventDispatcher); - } - - protected function createInstallationManager() - { - return new InstallationManager(); - } } diff --git a/src/Composer/Repository/RepositoryFactory.php b/src/Composer/Repository/RepositoryFactory.php index <HASH>..<HASH> 100644 --- a/src/Composer/Repository/RepositoryFactory.php +++ b/src/Composer/Repository/RepositoryFactory.php @@ -62,11 +62,11 @@ class RepositoryFactory * @param bool $allowFilesystem * @return RepositoryInterface */ - public static function fromString(IOInterface $io, Config $config, $repository, $allowFilesystem = false) + public static function fromString(IOInterface $io, Config $config, $repository, $allowFilesystem = false, RepositoryManager $rm = null) { $repoConfig = static::configFromString($io, $config, $repository, $allowFilesystem); - return static::createRepo($io, $config, $repoConfig); + return static::createRepo($io, $config, $repoConfig, $rm); } /** @@ -75,9 +75,11 @@ class RepositoryFactory * @param array $repoConfig * @return RepositoryInterface */ - public static function createRepo(IOInterface $io, Config $config, array $repoConfig) + public static function createRepo(IOInterface $io, Config $config, array $repoConfig, RepositoryManager $rm = null) { - $rm = static::manager($io, $config, null, Factory::createRemoteFilesystem($io, $config)); + if (!$rm) { + $rm = static::manager($io, $config, null, Factory::createRemoteFilesystem($io, $config)); + } $repos = static::createRepos($rm, array($repoConfig)); return reset($repos);
Reuse repository manager and others directly from the Composer instance, refs #<I>
composer_composer
train
d8fb56a0d323009924a2d999c1408b84541e730c
diff --git a/src/js/core/icon.js b/src/js/core/icon.js index <HASH>..<HASH> 100644 --- a/src/js/core/icon.js +++ b/src/js/core/icon.js @@ -32,10 +32,11 @@ export default function (UIkit) { var dimensions = el.attr('viewBox'); if (dimensions) { dimensions = dimensions.split(' '); - dimensions = ` width="${dimensions[2]}" height="${dimensions[3]}"`; + this.width = this.width || dimensions[2]; + this.height = this.height || dimensions[3]; } - this.insert(`<svg${dimensions} class="${this.cls}"><use xlink:href="${src}#${this.icon}"/></svg>`); + this.insert(`<svg class="${this.cls}"><use xlink:href="${src}#${this.icon}"/></svg>`)[0].classList.add('uk-icon'); }); } diff --git a/src/js/mixin/svg.js b/src/js/mixin/svg.js index <HASH>..<HASH> 100644 --- a/src/js/mixin/svg.js +++ b/src/js/mixin/svg.js @@ -4,7 +4,9 @@ var svgs = {}; export default { - props: ['id', 'class', 'style', 'width', 'height'], + props: {id: String, class: String, style: String, width: Number, height: Number, ratio: Number}, + + defaults: {ratio: 1}, methods: { @@ -20,6 +22,9 @@ export default { svg = $(svg); + this.width *= this.ratio; + this.height *= this.ratio; + for (var prop in this.$options.props) { if (prop !== 'src' && this[prop]) { svg.attr(prop, this[prop]); @@ -34,7 +39,7 @@ export default { svg.removeAttr('width'); } - this.$el.replaceWith(svg[0].outerHTML); + return $(svg[0].outerHTML).replaceAll(this.$el); } diff --git a/tests/icon.html b/tests/icon.html index <HASH>..<HASH> 100644 --- a/tests/icon.html +++ b/tests/icon.html @@ -194,6 +194,12 @@ <td>''</td> <td>The icon to display.</td> </tr> + <tr> + <td><code>ratio</code></td> + <td>Number</td> + <td>1</td> + <td>The icon size ratio.</td> + </tr> </tbody> </table>
added ratio attribute to icons; added the 'uk-icon' class to icons
uikit_uikit
train
ad08a2d6eee62b027da12b87e75c49429791adb7
diff --git a/lib/badgerfish/ox_sax_parser.rb b/lib/badgerfish/ox_sax_parser.rb index <HASH>..<HASH> 100644 --- a/lib/badgerfish/ox_sax_parser.rb +++ b/lib/badgerfish/ox_sax_parser.rb @@ -14,6 +14,7 @@ module Badgerfish end @html_entities_coder = HTMLEntities.new + @remove_namespaces = !!options[:remove_namespaces] @result = @root = {} @parents = [] Ox.sax_parse(self, StringIO.new(xml)) @@ -46,7 +47,7 @@ module Badgerfish end def attr(name, value) - unless name.to_s.start_with? 'xmlns' + if @remove_namespaces || !name.to_s.start_with?('xmlns') @root["@#{name}"] = value else @root['@xmlns'] ||= {}
Added option to disable/remove namespaces for performance
msievers_badgerfish
train
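The option simply widens the attribute-handling branch; a Python sketch of the same decision. The grouped '@xmlns' layout in the else branch is an assumption, since the hunk truncates that code path:

def put_attr(node, name, value, remove_namespaces=False):
    # Mirrors the patched condition: xmlns declarations only receive the
    # special grouped treatment when the option is off.
    if remove_namespaces or not name.startswith('xmlns'):
        node['@' + name] = value
    else:
        # Assumed completion of the truncated branch.
        node.setdefault('@xmlns', {})[name] = value
    return node

print(put_attr({}, 'xmlns:x', 'urn:x', remove_namespaces=True))  # {'@xmlns:x': 'urn:x'}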
046e4080b1913b24c25fa78a34b23cdc0e549d73
diff --git a/preferences/Preference.js b/preferences/Preference.js index <HASH>..<HASH> 100644 --- a/preferences/Preference.js +++ b/preferences/Preference.js @@ -1,6 +1,5 @@ // See https://github.com/jenkinsci/js-storage -const storage = require('@jenkins-cd/storage/storage'); -const StorageNamespace = require('@jenkins-cd/storage/StorageNamespace'); +const storage = require('@jenkins-cd/storage'); const DEFAULT_NS_NAME = 'jenkins-preferences'; @@ -17,7 +16,8 @@ const DEFAULT_NS_NAME = 'jenkins-preferences'; * preference.newPreference('key', ['defaultValue', 'allowedValues', 'namespace']); */ function Preference(key, defaultValue, allowedValues, namespace ) { - const preferences = new StorageNamespace(namespace || DEFAULT_NS_NAME, storage.local); + + const preferences = storage.localNamespace(namespace || DEFAULT_NS_NAME); if (key === undefined) { throw new Error('Cannot create preference. Preference "key" name must be specified.'); }
[Allow_custom_nameSpaces] Drop import of submodules and use the official API. Thanks @tfennelly for pointing this out.
jenkinsci_js-preferences
train
f6ef1dded422f5dd0836a3e076fc3d5e6acaa422
diff --git a/src/test/java/com/github/dockerjava/client/AbstractDockerClientTest.java b/src/test/java/com/github/dockerjava/client/AbstractDockerClientTest.java index <HASH>..<HASH> 100644 --- a/src/test/java/com/github/dockerjava/client/AbstractDockerClientTest.java +++ b/src/test/java/com/github/dockerjava/client/AbstractDockerClientTest.java @@ -34,7 +34,7 @@ public abstract class AbstractDockerClientTest extends Assert { LOG.info("Pulling image 'busybox'"); // need to block until image is pulled completely - logResponseStream(dockerClient.pullImageCmd("busybox:latest").exec()); + logResponseStream(dockerClient.pullImageCmd("busybox").withTag("latest").exec()); diff --git a/src/test/java/com/github/dockerjava/client/command/StartContainerCmdTest.java b/src/test/java/com/github/dockerjava/client/command/StartContainerCmdTest.java index <HASH>..<HASH> 100644 --- a/src/test/java/com/github/dockerjava/client/command/StartContainerCmdTest.java +++ b/src/test/java/com/github/dockerjava/client/command/StartContainerCmdTest.java @@ -116,10 +116,10 @@ public class StartContainerCmdTest extends AbstractDockerClientTest { contains(tcp22, tcp23)); assertThat(inspectContainerResponse.getHostConfig().getPortBindings().getBindings().get(tcp22), - is(equalTo(Ports.Binding("0.0.0.0", 11022)))); + is(equalTo(Ports.Binding(11022)))); assertThat(inspectContainerResponse.getHostConfig().getPortBindings().getBindings().get(tcp23), - is(equalTo(Ports.Binding("0.0.0.0", 11023)))); + is(equalTo(Ports.Binding(11023)))); tmpContainers.add(container.getId()); }
Fix tests to work with docker server <I>
docker-java_docker-java
train
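The API being tracked here splits the image name from its tag; for comparison only, the Docker SDK for Python exposes the same split. This assumes a reachable Docker daemon and is unrelated to the docker-java codebase:

import docker

client = docker.from_env()  # requires a running Docker daemon
# Tag passed separately, analogous to pullImageCmd("busybox").withTag("latest").
image = client.images.pull('busybox', tag='latest')
print(image.tags)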
82c33b816a41c7340e283256c565eab3320623af
diff --git a/lib/db/cortex.php b/lib/db/cortex.php index <HASH>..<HASH> 100644 --- a/lib/db/cortex.php +++ b/lib/db/cortex.php @@ -2524,11 +2524,12 @@ class CortexCollection extends \ArrayIterator { { $out = array(); foreach ($this->getArrayCopy() as $model) { - if ($model->exists($prop,true)) { + if ($model instanceof Cortex && $model->exists($prop,true)) { $val = $model->get($prop, $raw); if (!empty($val)) $out[] = $val; - } + } elseif($raw) + $out[] = $model; } return $out; }
fix id assignments on collections, #8
ikkez_f3-cortex
train
ff4c82039d2359f09361cd08d0f6efe70dc9103f
diff --git a/forms/gridfield/GridFieldDeleteAction.php b/forms/gridfield/GridFieldDeleteAction.php
index <HASH>..<HASH> 100644
--- a/forms/gridfield/GridFieldDeleteAction.php
+++ b/forms/gridfield/GridFieldDeleteAction.php
@@ -1,11 +1,20 @@
 <?php
 /**
- * This class is an GridField Component that add Delete action for Objects in the GridField.
- * See {@link GridFieldRemoveButton} for detaching an item from the current relationship instead.
+ * This class is a GridField component that adds a delete action for objects in a {@link GridField}.
+ *
+ * This also supports unlinking a relation instead of deleting the object. Use the {@link $removeRelation}
+ * property set in the constructor.
+ *
+ * <code>
+ * $action = new GridFieldDeleteAction(); // delete objects permanently
+ * $action = new GridFieldDeleteAction(true); // removes the relation to object, instead of deleting
+ * </code>
+ *
+ * @package sapphire
+ * @subpackage gridfield
  */
 class GridFieldDeleteAction implements GridField_ColumnProvider, GridField_ActionProvider {
-
 	/**
 	 * If this is set to true, this actionprovider will remove the object from the list, instead of
 	 * deleting. In the case of a has one, has many or many many list it will uncouple the item from
@@ -125,4 +134,4 @@ class GridFieldDeleteAction implements GridField_ColumnProvider, GridField_Actio
 			$gridField->getList()->remove($item);
 		}
 	}
-}
\ No newline at end of file
+}
MINOR Class documentation for GridFieldDeleteAction
silverstripe_silverstripe-framework
train
923c24f4383387778439c3e5d9112b7160721d27
diff --git a/src/Symfony/Component/HttpFoundation/Session/Storage/NativeSessionStorage.php b/src/Symfony/Component/HttpFoundation/Session/Storage/NativeSessionStorage.php index <HASH>..<HASH> 100644 --- a/src/Symfony/Component/HttpFoundation/Session/Storage/NativeSessionStorage.php +++ b/src/Symfony/Component/HttpFoundation/Session/Storage/NativeSessionStorage.php @@ -223,10 +223,6 @@ class NativeSessionStorage implements SessionStorageInterface $isRegenerated = session_regenerate_id($destroy); - // The reference to $_SESSION in session bags is lost in PHP7 and we need to re-create it. - // @see https://bugs.php.net/70013 - $this->loadSession(); - if (null !== $this->emulateSameSite) { $originalCookie = SessionUtils::popSessionCookie(session_name(), session_id()); if (null !== $originalCookie) {
No need to reconnect the bags to the session. Bug: <URL>
symfony_symfony
train
8b6842a898cfdcf3b7fdc74c167417e7ec85f630
diff --git a/src/Console/Commands/Init.php b/src/Console/Commands/Init.php index <HASH>..<HASH> 100644 --- a/src/Console/Commands/Init.php +++ b/src/Console/Commands/Init.php @@ -7,6 +7,7 @@ use NewUp\Exceptions\InvalidPathException; use NewUp\Templates\Package; use NewUp\Templates\TemplateInitializer; use Symfony\Component\Console\Input\InputArgument; +use Symfony\Component\Console\Input\InputOption; class Init extends Command { @@ -66,6 +67,8 @@ class Init extends Command } } + $this->templateInitializer->setShouldCreateTemplateDirectory($this->option('template-dir')); + $packageVendor = Package::parseVendorAndPackage($this->argument('name')); $this->templateInitializer->initialize($packageVendor[0], $packageVendor[1], $directory); } catch (InvalidPathException $invalidPath) { @@ -83,5 +86,12 @@ class Init extends Command ]; } + protected function getOptions() + { + return [ + ['template-dir', 't', InputOption::VALUE_NONE, 'If set, a "_template" directory will be created', null] + ]; + } + } \ No newline at end of file
New flag for creation of _template dir
newup_core
train
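The Symfony-console option above (short name plus a VALUE_NONE boolean) has a direct argparse analogue if one wanted the same CLI surface in Python; everything below is an illustrative sketch, not NewUp code:

import argparse

parser = argparse.ArgumentParser(prog='newup')
parser.add_argument(
    '-t', '--template-dir',
    action='store_true',  # boolean flag, like InputOption::VALUE_NONE
    help='If set, a "_template" directory will be created',
)

args = parser.parse_args(['--template-dir'])
assert args.template_dir is True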
d1d850d68af3fbdeea661ff6ef34309c555e5801
diff --git a/bigquery/client.py b/bigquery/client.py index <HASH>..<HASH> 100644 --- a/bigquery/client.py +++ b/bigquery/client.py @@ -1079,9 +1079,14 @@ class BigQueryClient(object): write_disposition : str, optional One of the JOB_WRITE_* constants use_legacy_sql: bool, optional - If False, the query will use BigQuery's standard SQL (https://cloud.google.com/bigquery/sql-reference/) + If False, the query will use BigQuery's standard SQL + (https://cloud.google.com/bigquery/sql-reference/) maximum_billing_tier : integer, optional - Limits the billing tier for this job. Queries that have resource usage beyond this tier will fail (without incurring a charge). If unspecified, this will be set to your project default. For more information, see https://cloud.google.com/bigquery/pricing#high-compute + Limits the billing tier for this job. Queries that have resource + usage beyond this tier will fail (without incurring a charge). If + unspecified, this will be set to your project default. For more + information, + see https://cloud.google.com/bigquery/pricing#high-compute Returns ------- diff --git a/bigquery/tests/test_client.py b/bigquery/tests/test_client.py index <HASH>..<HASH> 100644 --- a/bigquery/tests/test_client.py +++ b/bigquery/tests/test_client.py @@ -1117,6 +1117,7 @@ class TestWriteToTable(unittest.TestCase): self.project_id = 'project' self.dataset_id = 'dataset' self.table_id = 'table' + self.maximum_billing_tier = 1000 self.external_udf_uris = ['gs://bucket/external_udf.js'] self.use_query_cache = False self.priority = "INTERACTIVE" @@ -1162,6 +1163,44 @@ class TestWriteToTable(unittest.TestCase): self.assertEqual(result, expected_result) + def test_write_maxbilltier(self): + """ Ensure that write is working when maximumBillingTier is set""" + expected_result = { + 'status': {'state': u'RUNNING'}, + } + + body = { + "configuration": { + "query": { + "destinationTable": { + "projectId": self.project_id, + "datasetId": self.dataset_id, + "tableId": self.table_id + }, + "query": self.query, + "userDefinedFunctionResources": [{ + "resourceUri": self.external_udf_uris[0] + }], + "useQueryCache": self.use_query_cache, + "priority": self.priority, + "maximumBillingTier": self.maximum_billing_tier + } + } + } + + self.mock_api.jobs().insert().execute.return_value = expected_result + result = self.client.write_to_table( + self.query, self.dataset_id, self.table_id, priority=self.priority, + external_udf_uris=self.external_udf_uris, use_query_cache=False, + maximum_billing_tier=self.maximum_billing_tier) + + self.mock_api.jobs().insert.assert_called_with( + projectId=self.project_id, + body=body + ) + + self.assertEqual(result, expected_result) + def test_write_http_error(self): """ Test write with http error""" expected_result = {
Add a unit test and follow PEP 8 standards
tylertreat_BigQuery-Python
train
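A usage sketch of the new parameter as exercised by the test above; the write_to_table keywords come from the diff, while the get_client arguments are assumptions about this library's auth options:

from bigquery import get_client

# Auth details are assumptions; consult the project README.
client = get_client(project_id='project', json_key_file='key.json',
                    readonly=False)

job = client.write_to_table(
    'SELECT * FROM [dataset.table]',
    'dataset',
    'destination_table',
    priority='INTERACTIVE',
    use_query_cache=False,
    maximum_billing_tier=1000,  # queries beyond this tier fail without charge
)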
4a62b6d23003ab60accadfce6a2052b2e2dbab23
diff --git a/tofu/data/_mesh.py b/tofu/data/_mesh.py
index <HASH>..<HASH> 100644
--- a/tofu/data/_mesh.py
+++ b/tofu/data/_mesh.py
@@ -107,7 +107,10 @@ class Mesh2D(DataCollection):

         # get domain, poly from crop_poly
         if crop_poly is not None:
-            domain, poly = _mesh_checks._mesh2DRect_from_croppoly(crop_poly)
+            domain, poly = _mesh_checks._mesh2DRect_from_croppoly(
+                crop_poly=crop_poly,
+                domain=domain,
+            )
         else:
             poly = None

diff --git a/tofu/data/_mesh_checks.py b/tofu/data/_mesh_checks.py
index <HASH>..<HASH> 100644
--- a/tofu/data/_mesh_checks.py
+++ b/tofu/data/_mesh_checks.py
@@ -652,7 +652,7 @@ def _mesh2DRect_to_dict(
     return dref, dmesh


-def _mesh2DRect_from_croppoly(crop_poly=None):
+def _mesh2DRect_from_croppoly(crop_poly=None, domain=None):

     # ------------
     # check inputs
@@ -668,8 +668,11 @@ def _mesh2DRect_from_croppoly(crop_poly=None):
         ) or crop_poly.__class__.__name__ == 'Config',
         c0
-        and all([hasattr(cc, '__iter__') and len(cc) == len(crop_poly[0])])
-        and np.asrray(crop_poly).ndim == 2
+        and all([
+            hasattr(cc, '__iter__') and len(cc) == len(crop_poly[0])
+            for cc in crop_poly[1:]
+        ])
+        and np.asarray(crop_poly).ndim == 2
     ]

     if not any(lc):
@@ -684,7 +687,6 @@ def _mesh2DRect_from_croppoly(crop_poly=None):
     if lc[0]:
         # trivial case
         poly = None
-        domain = None

     else:

@@ -692,6 +694,7 @@ def _mesh2DRect_from_croppoly(crop_poly=None):
         # Get poly from input
         if lc[1]:

+            # (config, structure name)
             if crop_poly.__class__.__name__ == 'Config':

                 config = crop_poly
@@ -711,7 +714,7 @@ def _mesh2DRect_from_croppoly(crop_poly=None):

         else:

-            # make sure poloy is np.ndarraya and closed
+            # make sure poly is np.ndarray and closed
             poly = np.asarray(crop_poly).astype(float)
             if not np.allclose(poly[:, 0], poly[:, -1]):
                 poly = np.concatenate((poly, poly[:, 0:1]))
@@ -719,10 +722,11 @@ def _mesh2DRect_from_croppoly(crop_poly=None):
     # -------------
     # Get domain from poly

-    domain = [
-        [poly[0, :].min(), poly[0, :].max()],
-        [poly[1, :].min(), poly[1, :].max()],
-    ]
+    if domain is None:
+        domain = [
+            [poly[0, :].min(), poly[0, :].max()],
+            [poly[1, :].min(), poly[1, :].max()],
+        ]

     return domain, poly
[#<I>] Added possibility of specifying domain when crop_poly is provided to Mesh2D.add_mesh()
ToFuProject_tofu
train
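The behavioural change reduces to "keep the caller's domain, otherwise derive it from the crop polygon's bounding box"; a standalone restatement of that fallback, runnable outside tofu:

import numpy as np

def resolve_domain(poly, domain=None):
    # Mirror of the patched fallback: an explicitly supplied domain now
    # wins over the bounding box of the crop polygon.
    if domain is None:
        domain = [
            [poly[0, :].min(), poly[0, :].max()],
            [poly[1, :].min(), poly[1, :].max()],
        ]
    return domain

poly = np.array([[1.5, 3.0, 3.0, 1.5, 1.5],
                 [-1.0, -1.0, 1.0, 1.0, -1.0]])
print(resolve_domain(poly))                             # derived bounding box
print(resolve_domain(poly, [[1.0, 3.5], [-1.5, 1.5]]))  # explicit domain kept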
195a7f1a6fd74e9681f7a51b8bf29c99a5036867
diff --git a/dispatch/static/manager/src/js/components/inputs/TextInput.js b/dispatch/static/manager/src/js/components/inputs/TextInput.js index <HASH>..<HASH> 100644 --- a/dispatch/static/manager/src/js/components/inputs/TextInput.js +++ b/dispatch/static/manager/src/js/components/inputs/TextInput.js @@ -39,7 +39,7 @@ export default class TextInput extends React.Component { ref='input' className={`pt-input${ this.props.fill ? ' pt-fill' : '' }${ this.props.className ? ' ' + this.props.className : ''}`} type={this.props.type || 'text'} - value={this.props.value} + value={this.props.value || ''} disabled={this.props.disabled} placeholder={this.props.placeholder} onChange={this.handleOnChange}
Give TextInput a default value to prevent a warning; this doesn't break the placeholder.
ubyssey_dispatch
train
a477542c6c0a5b9ea8601093967bd01b0631ec05
diff --git a/test/addon/core/TestAddOn.spec.js b/test/addon/core/TestAddOn.spec.js index <HASH>..<HASH> 100644 --- a/test/addon/core/TestAddOn.spec.js +++ b/test/addon/core/TestAddOn.spec.js @@ -112,11 +112,8 @@ describe('TestAddOn', function () { }); after(function (done) { - if (server) { - server.stop(done); - } else { - done(); - } + socket.disconnect(); + server.stop(done); }); it('should start, query, update, query, and stop', function (done) {
#<I> disconnect socket in test.
webgme_webgme-engine
train
60819a1a10f9eb07a270605a1aaf094521ade643
diff --git a/mstate/unit.go b/mstate/unit.go
index <HASH>..<HASH> 100644
--- a/mstate/unit.go
+++ b/mstate/unit.go
@@ -3,6 +3,7 @@ package mstate
 import (
 	"errors"
 	"fmt"
+	"labix.org/v2/mgo"
 	"labix.org/v2/mgo/bson"
 )

@@ -11,9 +12,10 @@ import (
 type ResolvedMode int

 const (
-	ResolvedNone       ResolvedMode = 0
-	ResolvedRetryHooks ResolvedMode = 1000
-	ResolvedNoHooks    ResolvedMode = 1001
+	ResolvedNone ResolvedMode = iota
+	ResolvedRetryHooks
+	ResolvedNoHooks
+	nResolvedModes
 )

 // AssignmentPolicy controls what machine a unit will be assigned to.
@@ -54,10 +56,11 @@ type unitDoc struct {
 	Name           string `bson:"_id"`
 	Service        string
 	Principal      string
-	MachineId      *int
-	Life           Life
 	PublicAddress  *string
 	PrivateAddress *string
+	MachineId      *int
+	Resolved       ResolvedMode
+	Life           Life
 }

 // Unit represents the state of a service unit.
@@ -88,6 +91,11 @@ func (u *Unit) Name() string {
 	return u.doc.Name
 }

+// Resolved returns the resolved mode for the unit.
+func (u *Unit) Resolved() (mode ResolvedMode, err error) {
+	return u.doc.Resolved, nil
+}
+
 // IsPrincipal returns whether the unit is deployed in its own container,
 // and can therefore have subordinate services deployed alongside it.
 func (u *Unit) IsPrincipal() bool {
@@ -195,3 +203,41 @@ func (u *Unit) SetPrivateAddress(address string) error {
 	u.doc.PrivateAddress = &address
 	return nil
 }
+
+// SetResolved marks the unit as having had any previous state transition
+// problems resolved, and informs the unit that it may attempt to
+// reestablish normal workflow. The resolved mode parameter informs
+// whether to attempt to reexecute previous failed hooks or to continue
+// as if they had succeeded before.
+func (u *Unit) SetResolved(mode ResolvedMode) (err error) {
+	defer errorContextf(&err, "cannot set resolved mode for unit %q", u)
+	if !(0 <= mode && mode < nResolvedModes) {
+		return fmt.Errorf("invalid error resolution mode: %v", mode)
+	}
+	change := bson.D{{"$set", bson.D{{"resolved", mode}}}}
+	sel := bson.D{
+		{"_id", u.doc.Name},
+		{"resolved", ResolvedNone},
+	}
+	err = u.st.units.Update(sel, change)
+	if err == mgo.ErrNotFound {
+		return errors.New("flag already set")
+	}
+	if err != nil {
+		return err
+	}
+	u.doc.Resolved = mode
+	return nil
+}
+
+// ClearResolved removes any resolved setting on the unit.
+func (u *Unit) ClearResolved() error { + change := bson.D{{"$set", bson.D{{"resolved", ResolvedNone}}}} + sel := bson.D{{"_id", u.doc.Name}} + err := u.st.units.Update(sel, change) + if err != nil { + return fmt.Errorf("cannot clear resolved mode for unit %q: %v", u, err) + } + u.doc.Resolved = ResolvedNone + return nil +} diff --git a/mstate/unit_test.go b/mstate/unit_test.go index <HASH>..<HASH> 100644 --- a/mstate/unit_test.go +++ b/mstate/unit_test.go @@ -41,3 +41,28 @@ func (s *UnitSuite) TestGetSetPrivateAddress(c *C) { c.Assert(err, IsNil) c.Assert(address, Equals, "example.local") } + +func (s *UnitSuite) TestGetSetClearResolved(c *C) { + setting, err := s.unit.Resolved() + c.Assert(err, IsNil) + c.Assert(setting, Equals, state.ResolvedNone) + + err = s.unit.SetResolved(state.ResolvedNoHooks) + c.Assert(err, IsNil) + err = s.unit.SetResolved(state.ResolvedNoHooks) + c.Assert(err, ErrorMatches, `cannot set resolved mode for unit "wordpress/0": flag already set`) + retry, err := s.unit.Resolved() + c.Assert(err, IsNil) + c.Assert(retry, Equals, state.ResolvedNoHooks) + + err = s.unit.ClearResolved() + c.Assert(err, IsNil) + setting, err = s.unit.Resolved() + c.Assert(err, IsNil) + c.Assert(setting, Equals, state.ResolvedNone) + err = s.unit.ClearResolved() + c.Assert(err, IsNil) + + err = s.unit.SetResolved(state.ResolvedMode(999)) + c.Assert(err, ErrorMatches, `cannot set resolved mode for unit "wordpress/0": invalid error resolution mode: 999`) +}
mstate: add {Set,Get,}Resolved
juju_juju
train
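SetResolved gets its atomicity by folding the precondition into the MongoDB selector, so a concurrent setter makes the update match nothing; the same compare-and-set reads naturally with pymongo. Field names follow the diff, while the connection details are illustrative:

from pymongo import MongoClient

RESOLVED_NONE, RESOLVED_RETRY_HOOKS, RESOLVED_NO_HOOKS = 0, 1, 2
N_RESOLVED_MODES = 3

def set_resolved(units, name, mode):
    if not 0 <= mode < N_RESOLVED_MODES:
        raise ValueError('invalid error resolution mode: %r' % mode)
    result = units.update_one(
        {'_id': name, 'resolved': RESOLVED_NONE},  # precondition in the selector
        {'$set': {'resolved': mode}},
    )
    if result.matched_count == 0:
        raise RuntimeError('flag already set')

# units = MongoClient().juju.units  # placeholder connection

As in the mgo.ErrNotFound branch above, a zero matched_count conflates "document missing" with "flag already set".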
f943f047e881435d711feab1aa12c9c931bad4b3
diff --git a/lxd/storage/drivers/driver_lvm_utils.go b/lxd/storage/drivers/driver_lvm_utils.go index <HASH>..<HASH> 100644 --- a/lxd/storage/drivers/driver_lvm_utils.go +++ b/lxd/storage/drivers/driver_lvm_utils.go @@ -50,26 +50,6 @@ func (d *lvm) thinpoolName() string { return "LXDThinPool" } -// volumeFilesystem returns the filesystem to use for logical volumes. -func (d *lvm) volumeFilesystem(vol Volume) string { - fs := vol.ExpandedConfig("block.filesystem") - if fs != "" { - return fs - } - - return DefaultFilesystem -} - -// volumeSize returns the size to use when creating new a volume. -func (d *lvm) volumeSize(vol Volume) string { - size := vol.ExpandedConfig("size") - if size == "" || size == "0" { - return defaultBlockSize - } - - return size -} - // mountOptions returns the mount options for volumes. func (d *lvm) volumeMountOptions(vol Volume) string { if d.config["block.mount_options"] != "" {
lxd/storage/drivers/driver/lvm/utils: Removes functions moved into Volume struct
lxc_lxd
train
37d036a84cfcd39e3ca7723540c6a6791d58fc69
diff --git a/test/unit/support.js b/test/unit/support.js index <HASH>..<HASH> 100644 --- a/test/unit/support.js +++ b/test/unit/support.js @@ -83,6 +83,22 @@ testIframeWithCallback( "Check CSP (https://developer.mozilla.org/en-US/docs/Sec "radioValue": false, "reliableMarginRight": true }; + } else if ( /trident\/7\.0/i.test( userAgent ) ) { + expected = { + "ajax": true, + "boxSizingReliable": false, + "checkClone": true, + "checkOn": true, + "clearCloneStyle": false, + "cors": true, + "focusinBubbles": true, + "noCloneChecked": true, + "optDisabled": true, + "optSelected": false, + "pixelPosition": true, + "radioValue": false, + "reliableMarginRight": true + }; } else if ( /msie 10\.0/i.test( userAgent ) ) { expected = { "ajax": true, @@ -115,6 +131,22 @@ testIframeWithCallback( "Check CSP (https://developer.mozilla.org/en-US/docs/Sec "radioValue": false, "reliableMarginRight": true }; + } else if ( /7\.0\.\d+ safari/i.test( userAgent ) ) { + expected = { + "ajax": true, + "boxSizingReliable": true, + "checkClone": true, + "checkOn": true, + "clearCloneStyle": true, + "cors": true, + "focusinBubbles": false, + "noCloneChecked": true, + "optDisabled": true, + "optSelected": true, + "pixelPosition": false, + "radioValue": true, + "reliableMarginRight": true + }; } else if ( /6\.0\.\d+ safari/i.test( userAgent ) ) { expected = { "ajax": true,
Support: Add expected support results for Safari 7 & IE<I>
jquery_jquery
train
9343833f1af3feeafc355b26f7390d7a40f5e54d
diff --git a/querydsl-hql/src/main/java/com/mysema/query/hql/HQLTemplates.java b/querydsl-hql/src/main/java/com/mysema/query/hql/HQLTemplates.java index <HASH>..<HASH> 100644 --- a/querydsl-hql/src/main/java/com/mysema/query/hql/HQLTemplates.java +++ b/querydsl-hql/src/main/java/com/mysema/query/hql/HQLTemplates.java @@ -51,6 +51,8 @@ public class HQLTemplates extends Templates { add(Ops.MathOps.SQRT, "sqrt({0})"); // various + add(Ops.NE_PRIMITIVE, "{0} <> {1}", 25); + add(Ops.NE_OBJECT, "{0} <> {1}", 25); add(Ops.IS_NULL, "{0} is null", 26); add(Ops.IS_NOT_NULL, "{0} is not null", 26); diff --git a/querydsl-hql/src/test/java/com/mysema/query/hql/ComparableTest.java b/querydsl-hql/src/test/java/com/mysema/query/hql/ComparableTest.java index <HASH>..<HASH> 100644 --- a/querydsl-hql/src/test/java/com/mysema/query/hql/ComparableTest.java +++ b/querydsl-hql/src/test/java/com/mysema/query/hql/ComparableTest.java @@ -17,7 +17,7 @@ public class ComparableTest extends AbstractQueryTest{ assertToString("cat.bodyWeight > kitten.bodyWeight", cat.bodyWeight.gt(kitten.bodyWeight)); assertToString("cat.bodyWeight <= kitten.bodyWeight", cat.bodyWeight.loe(kitten.bodyWeight)); assertToString("cat.bodyWeight < kitten.bodyWeight", cat.bodyWeight.lt(kitten.bodyWeight)); - assertToString("cat.bodyWeight != kitten.bodyWeight", cat.bodyWeight.ne(kitten.bodyWeight)); + assertToString("cat.bodyWeight <> kitten.bodyWeight", cat.bodyWeight.ne(kitten.bodyWeight)); // toString("cat.name like :a1", cat.name.like("Kitty")); } diff --git a/querydsl-hql/src/test/java/com/mysema/query/hql/FeaturesTest.java b/querydsl-hql/src/test/java/com/mysema/query/hql/FeaturesTest.java index <HASH>..<HASH> 100644 --- a/querydsl-hql/src/test/java/com/mysema/query/hql/FeaturesTest.java +++ b/querydsl-hql/src/test/java/com/mysema/query/hql/FeaturesTest.java @@ -58,7 +58,7 @@ public class FeaturesTest extends AbstractQueryTest { public void testBasicOperations() { assertToString("cat.bodyWeight = kitten.bodyWeight", cat.bodyWeight .eq(kitten.bodyWeight)); - assertToString("cat.bodyWeight != kitten.bodyWeight", cat.bodyWeight + assertToString("cat.bodyWeight <> kitten.bodyWeight", cat.bodyWeight .ne(kitten.bodyWeight)); assertToString( @@ -70,7 +70,7 @@ public class FeaturesTest extends AbstractQueryTest { public void testEqualsAndNotEqualsForAllExpressions() { assertToString("cat.name = cust.name.firstName", cat.name .eq(cust.name.firstName)); - assertToString("cat.name != cust.name.firstName", cat.name + assertToString("cat.name <> cust.name.firstName", cat.name .ne(cust.name.firstName)); }
#<I> : worked on EclipseLink support
querydsl_querydsl
train
29fe417b7dfec51803c9f41a317a1290093b8f5c
diff --git a/src/Overlay.js b/src/Overlay.js index <HASH>..<HASH> 100644 --- a/src/Overlay.js +++ b/src/Overlay.js @@ -78,10 +78,9 @@ class Overlay extends React.Component { </Transition> ); } else { - child = cloneElement( - child, - {className: classNames('in', child.className)} - ); + child = cloneElement(child, { + className: classNames('in', child.props.className) + }); } // This goes after everything else because it adds a wrapping div. diff --git a/src/Position.js b/src/Position.js index <HASH>..<HASH> 100644 --- a/src/Position.js +++ b/src/Position.js @@ -1,4 +1,5 @@ import React, { cloneElement } from 'react'; +import classNames from 'classnames'; import domUtils from './utils/domUtils'; import { calcOverlayPosition } from './utils/overlayPositionUtils'; import CustomPropTypes from './utils/CustomPropTypes'; @@ -40,7 +41,7 @@ class Position extends React.Component { } render() { - const {children, ...props} = this.props; + const {children, className, ...props} = this.props; const {positionLeft, positionTop, ...arrowPosition} = this.state; const child = React.Children.only(children); @@ -51,6 +52,7 @@ class Position extends React.Component { ...arrowPosition, positionTop, positionLeft, + className: classNames(className, child.props.className), style: { ...child.props.style, left: positionLeft, diff --git a/test/OverlayTriggerSpec.js b/test/OverlayTriggerSpec.js index <HASH>..<HASH> 100644 --- a/test/OverlayTriggerSpec.js +++ b/test/OverlayTriggerSpec.js @@ -39,6 +39,19 @@ describe('OverlayTrigger', function() { instance.state.isOverlayShown.should.be.true; }); + it('Should maintain overlay classname', function() { + const instance = ReactTestUtils.renderIntoDocument( + <OverlayTrigger trigger='click' overlay={<div className='test-overlay'>test</div>}> + <button>button</button> + </OverlayTrigger> + ); + + const overlayTrigger = React.findDOMNode(instance); + ReactTestUtils.Simulate.click(overlayTrigger); + + expect(document.getElementsByClassName('test-overlay').length).to.equal(1) + }); + it('Should pass transition callbacks to Transition', function (done) { let count = 0; let increment = ()=> count++;
[fixed] Overlay classNames are maintained by OverlayTrigger; fixes #<I>
react-bootstrap_react-bootstrap
train
f53a86b3614343b145f9d14bb2d12b5f1f1246bd
diff --git a/src/wavesurfer.js b/src/wavesurfer.js index <HASH>..<HASH> 100644 --- a/src/wavesurfer.js +++ b/src/wavesurfer.js @@ -17,6 +17,7 @@ var WaveSurfer = { pixelRatio : window.devicePixelRatio, fillParent : true, scrollParent : false, + hideScrollbar : false, normalize : false, audioContext : null, container : null,
Add ability to hide scrollbar. Sometimes you may want the scrollbar not to be visible (e.g. 2 or more linked waveforms on the page controlled by a single scrollbar). The "hideScrollbar" option (default=false for backward compatibility) allows for this.
katspaugh_wavesurfer.js
train
389af9207cfe18e6aa9ba7d9346f1390c104ca93
diff --git a/lib/classes/Swift/Message.php b/lib/classes/Swift/Message.php index <HASH>..<HASH> 100644 --- a/lib/classes/Swift/Message.php +++ b/lib/classes/Swift/Message.php @@ -75,7 +75,7 @@ class Swift_Message extends Swift_Mime_SimpleMessage } /** - * Detach a signature handler from a message. + * Attach a new signature handler to the message. * * @return $this */ @@ -91,7 +91,7 @@ class Swift_Message extends Swift_Mime_SimpleMessage } /** - * Attach a new signature handler to the message. + * Detach a signature handler from a message. * * @return $this */
Correct doc blocks of signer methods
swiftmailer_swiftmailer
train
cd118c353629ca4df7b770cc4fb7a28a3648d8e9
diff --git a/core/src/elements/ons-popover/index.js b/core/src/elements/ons-popover/index.js index <HASH>..<HASH> 100644 --- a/core/src/elements/ons-popover/index.js +++ b/core/src/elements/ons-popover/index.js @@ -550,6 +550,12 @@ export default class PopoverElement extends BaseElement { contentReady(this, () => { this._margin = this._margin || parseInt(window.getComputedStyle(this).getPropertyValue('top')); + + // Fix for iframes + if (!this._margin) { + this._margin = 6; + } + this._radius = parseInt(window.getComputedStyle(this._content).getPropertyValue('border-top-left-radius')); this._mask.addEventListener('click', this._boundCancel, false);
fix(ons-popover): Position in iFrames.
OnsenUI_OnsenUI
train