Columns:
hash: stringlengths (40 to 40)
diff: stringlengths (131 to 114k)
message: stringlengths (7 to 980)
project: stringlengths (5 to 67)
split: stringclasses (1 value)
66ae23d2a5dcebe34596f6eba01bf436f01229fe
diff --git a/grunt/karma.js b/grunt/karma.js index <HASH>..<HASH> 100644 --- a/grunt/karma.js +++ b/grunt/karma.js @@ -40,7 +40,6 @@ module.exports = { }, phantom: { - configFile: 'grunt/karma-configs/phantom.karma.js', frameworks: ['jasmine', 'es5-shim'], browsers: ['PhantomJS'], },
Attempt at fixing grunt test, phantomJS error.
optimizely_nuclear-js
train
c12ae7b86ff5da75341fecd5ab9f9adb06cf5cb5
diff --git a/src/Command.php b/src/Command.php index <HASH>..<HASH> 100644 --- a/src/Command.php +++ b/src/Command.php @@ -147,14 +147,14 @@ class Command $command = escapeshellcmd($command); } if ($this->getIsWindows()) { - $position = null; - // Make sure to switch to correct drive like "E:" first if we have a full path in command if (isset($command[1]) && $command[1]===':') { $position = 1; // Could be a quoted absolute path because of spaces. i.e. "C:\Program Files (x86)\file.exe" } elseif (isset($command[2]) && $command[2]===':') { $position = 2; + } else { + $position = false; } // Absolute path. If it's a relative path, let it slide.
Move $position into its own else block and default it to false instead of null.
mikehaertl_php-shellcommand
train
f575a3a4e5435120f35192019a08d369c4cf96a9
diff --git a/dustmaps/json_serializers.py b/dustmaps/json_serializers.py index <HASH>..<HASH> 100644 --- a/dustmaps/json_serializers.py +++ b/dustmaps/json_serializers.py @@ -28,6 +28,7 @@ from __future__ import print_function +import six import json import base64 @@ -85,13 +86,13 @@ def deserialize_dtype(d): Returns: A ``dtype`` object. """ - if type(d['descr']) in (str, unicode): + if isinstance(d['descr'], six.string_types): return np.dtype(d['descr']) descr = [] for col in d['descr']: col_descr = [] for c in col: - if type(c) in (str, unicode): + if isinstance(d['descr'], six.string_types): col_descr.append(str(c)) elif type(c) is list: col_descr.append(tuple(c)) @@ -117,7 +118,7 @@ def serialize_ndarray_b64(o): data_b64 = base64.b64encode(o_data) return dict( _type='np.ndarray', - data=data_b64, + data=data_b64.decode('utf-8'), dtype=o.dtype, shape=o.shape)
Fixing JSON serialization of strings in Python3.
gregreen_dustmaps
train
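A minimal Python sketch of the two patterns the dustmaps commit above applies: isinstance() with six.string_types instead of type() comparisons against str/unicode (which break on Python 3), and decoding base64 bytes to str before JSON serialization. The function bodies and sample data here are illustrative, not the project's actual serializers.

```python
import base64
import json

import numpy as np
import six  # six.string_types is (basestring,) on py2 and (str,) on py3


def deserialize_dtype(descr):
    # isinstance() with six.string_types works on both Python 2 and 3;
    # `type(x) in (str, unicode)` raises NameError on py3.
    if isinstance(descr, six.string_types):
        return np.dtype(descr)
    return np.dtype([(str(name), str(typ)) for name, typ in descr])


def serialize_ndarray_b64(arr):
    # b64encode() returns bytes; json.dumps() only accepts str, so decode.
    data_b64 = base64.b64encode(arr.tobytes()).decode('utf-8')
    return {'_type': 'np.ndarray', 'data': data_b64,
            'dtype': str(arr.dtype), 'shape': arr.shape}


print(json.dumps(serialize_ndarray_b64(np.arange(4))))
```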
51fa7ec0e9af89ade12f3b73e70e941bde72aa56
diff --git a/luigi/contrib/s3.py b/luigi/contrib/s3.py index <HASH>..<HASH> 100644 --- a/luigi/contrib/s3.py +++ b/luigi/contrib/s3.py @@ -264,9 +264,6 @@ class S3Client(FileSystem): self._check_deprecated_argument(**kwargs) (bucket, key) = self._path_to_bucket_and_key(destination_s3_path) - # validate the bucket - self._validate_bucket(bucket) - # put the file self.s3.meta.client.put_object( Key=key, Bucket=bucket, Body=content, **kwargs) @@ -289,9 +286,6 @@ class S3Client(FileSystem): (bucket, key) = self._path_to_bucket_and_key(destination_s3_path) - # validate the bucket - self._validate_bucket(bucket) - self.s3.meta.client.upload_fileobj( Fileobj=open(local_path, 'rb'), Bucket=bucket, Key=key, Config=transfer_config, ExtraArgs=kwargs) @@ -535,19 +529,6 @@ class S3Client(FileSystem): 'For region names, refer to the amazon S3 region documentation\n' 'https://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region') - def _validate_bucket(self, bucket_name): - exists = True - - try: - self.s3.meta.client.head_bucket(Bucket=bucket_name) - except botocore.exceptions.ClientError as e: - error_code = e.response['Error']['Code'] - if error_code in ('404', 'NoSuchBucket'): - exists = False - else: - raise - return exists - def _exists(self, bucket, key): try: self.s3.Object(bucket, key).load() diff --git a/test/contrib/s3_test.py b/test/contrib/s3_test.py index <HASH>..<HASH> 100644 --- a/test/contrib/s3_test.py +++ b/test/contrib/s3_test.py @@ -177,6 +177,12 @@ class TestS3Client(unittest.TestCase): s3_client.put(self.tempFilePath, 's3://mybucket/putMe') self.assertTrue(s3_client.exists('s3://mybucket/putMe')) + def test_put_no_such_bucket(self): + # intentionally don't create bucket + s3_client = S3Client(AWS_ACCESS_KEY, AWS_SECRET_KEY) + with self.assertRaises(s3_client.s3.meta.client.exceptions.NoSuchBucket): + s3_client.put(self.tempFilePath, 's3://mybucket/putMe') + def test_put_sse_deprecated(self): create_bucket() s3_client = S3Client(AWS_ACCESS_KEY, AWS_SECRET_KEY) @@ -197,6 +203,12 @@ class TestS3Client(unittest.TestCase): s3_client.put_string("SOMESTRING", 's3://mybucket/putString') self.assertTrue(s3_client.exists('s3://mybucket/putString')) + def test_put_string_no_such_bucket(self): + # intentionally don't create bucket + s3_client = S3Client(AWS_ACCESS_KEY, AWS_SECRET_KEY) + with self.assertRaises(s3_client.s3.meta.client.exceptions.NoSuchBucket): + s3_client.put_string("SOMESTRING", 's3://mybucket/putString') + def test_put_string_sse_deprecated(self): create_bucket() s3_client = S3Client(AWS_ACCESS_KEY, AWS_SECRET_KEY) @@ -259,6 +271,12 @@ class TestS3Client(unittest.TestCase): file_size = 5000 return self._run_multipart_test(part_size, file_size) + def test_put_multipart_no_such_bucket(self): + # intentionally don't create bucket + s3_client = S3Client(AWS_ACCESS_KEY, AWS_SECRET_KEY) + with self.assertRaises(s3_client.s3.meta.client.exceptions.NoSuchBucket): + s3_client.put_multipart(self.tempFilePath, 's3://mybucket/putMe') + def test_exists(self): create_bucket() s3_client = S3Client(AWS_ACCESS_KEY, AWS_SECRET_KEY)
Remove s3 bucket validation prior to file upload (#<I>) * Remove unnecessary s3 bucket validation check prior to content upload * Add tests for s3 put* when bucket doesn't exist
spotify_luigi
train
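The luigi commit above drops a head_bucket() pre-check and lets put_object() itself raise NoSuchBucket, saving one round trip per successful upload (the "easier to ask forgiveness than permission" pattern). A hedged sketch using the same boto3 calls that appear in the diff; the bucket and key names are placeholders.

```python
import boto3


def put_string(content, bucket, key):
    client = boto3.client('s3')
    try:
        # No separate existence check: a missing bucket surfaces here.
        client.put_object(Bucket=bucket, Key=key, Body=content)
    except client.exceptions.NoSuchBucket:
        raise RuntimeError('bucket %r does not exist' % bucket)
```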
f9aa0e21d5e442ec13c45bafbc40d790472c4bfc
diff --git a/lib/chronic/chronic.rb b/lib/chronic/chronic.rb index <HASH>..<HASH> 100644 --- a/lib/chronic/chronic.rb +++ b/lib/chronic/chronic.rb @@ -123,6 +123,7 @@ module Chronic text.gsub!(/\b\d+:?\d*[ap]\b/,'\0m') text.gsub!(/(\d)([ap]m|oclock)\b/, '\1 \2') text.gsub!(/\b(hence|after|from)\b/, 'future') + text.gsub!(/^a /, '1 ') text end diff --git a/test/test_parsing.rb b/test/test_parsing.rb index <HASH>..<HASH> 100644 --- a/test/test_parsing.rb +++ b/test/test_parsing.rb @@ -685,6 +685,17 @@ class TestParsing < TestCase assert_equal Time.local(2006, 8, 8, 12), time end + def test_parse_guess_a_ago + time = parse_now("a day ago") + assert_equal Time.local(2006, 8, 15, 14), time + + time = parse_now("a month ago") + assert_equal Time.local(2006, 7, 16, 14), time + + time = parse_now("a year ago") + assert_equal Time.local(2005, 8, 16, 14), time + end + def test_parse_guess_s_r_p # past
handle text starting w/ a ...
mojombo_chronic
train
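The chronic fix is a one-line pre-tokenization rewrite: a leading indefinite article becomes the digit 1, so the parser only ever sees "<number> <unit> ago". The original is Ruby (text.gsub!(/^a /, '1 ')); the equivalent in Python, for illustration only:

```python
import re


def normalize(text):
    # "a day ago" -> "1 day ago"; numeric inputs pass through untouched.
    return re.sub(r'^a ', '1 ', text)


assert normalize('a day ago') == '1 day ago'
assert normalize('2 days ago') == '2 days ago'
```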
5fa808a78860afaeb14babb7d8752daae0a2bcb7
diff --git a/controller/appcontroller.go b/controller/appcontroller.go index <HASH>..<HASH> 100644 --- a/controller/appcontroller.go +++ b/controller/appcontroller.go @@ -700,6 +700,11 @@ func (ctrl *ApplicationController) finalizeApplicationDeletion(app *appv1.Applic return nil, err } + err = argo.ValidateDestination(context.Background(), &app.Spec.Destination, ctrl.db) + if err != nil { + return nil, err + } + objsMap, err := ctrl.getPermittedAppLiveObjects(app, proj) if err != nil { return nil, err diff --git a/controller/appcontroller_test.go b/controller/appcontroller_test.go index <HASH>..<HASH> 100644 --- a/controller/appcontroller_test.go +++ b/controller/appcontroller_test.go @@ -600,6 +600,50 @@ func TestFinalizeAppDeletion(t *testing.T) { assert.NotEqual(t, "test-cm", o.GetName()) } }) + + t.Run("DeleteWithDestinationClusterName", func(t *testing.T) { + app := newFakeAppWithDestName() + appObj := kube.MustToUnstructured(&app) + ctrl := newFakeController(&fakeData{apps: []runtime.Object{app, &defaultProj}, managedLiveObjs: map[kube.ResourceKey]*unstructured.Unstructured{ + kube.GetResourceKey(appObj): appObj, + }}) + patched := false + fakeAppCs := ctrl.applicationClientset.(*appclientset.Clientset) + defaultReactor := fakeAppCs.ReactionChain[0] + fakeAppCs.ReactionChain = nil + fakeAppCs.AddReactor("get", "*", func(action kubetesting.Action) (handled bool, ret runtime.Object, err error) { + return defaultReactor.React(action) + }) + fakeAppCs.AddReactor("patch", "*", func(action kubetesting.Action) (handled bool, ret runtime.Object, err error) { + patched = true + return true, nil, nil + }) + _, err := ctrl.finalizeApplicationDeletion(app) + assert.NoError(t, err) + assert.True(t, patched) + }) + + t.Run("ErrorOnBothDestNameAndServer", func(t *testing.T) { + app := newFakeAppWithDestMismatch() + appObj := kube.MustToUnstructured(&app) + ctrl := newFakeController(&fakeData{apps: []runtime.Object{app, &defaultProj}, managedLiveObjs: map[kube.ResourceKey]*unstructured.Unstructured{ + kube.GetResourceKey(appObj): appObj, + }}) + patched := false + fakeAppCs := ctrl.applicationClientset.(*appclientset.Clientset) + defaultReactor := fakeAppCs.ReactionChain[0] + fakeAppCs.ReactionChain = nil + fakeAppCs.AddReactor("get", "*", func(action kubetesting.Action) (handled bool, ret runtime.Object, err error) { + return defaultReactor.React(action) + }) + fakeAppCs.AddReactor("patch", "*", func(action kubetesting.Action) (handled bool, ret runtime.Object, err error) { + patched = true + return true, nil, nil + }) + _, err := ctrl.finalizeApplicationDeletion(app) + assert.EqualError(t, err, "application destination can't have both name and server defined: another-cluster https://localhost:6443") + assert.False(t, patched) + }) } // TestNormalizeApplication verifies we normalize an application during reconciliation
fix: Infer cluster URL while deleting app resource (#<I>) An application can be created by specifying only the cluster name. Since the cluster URL is used for queries, it should be inferred. The ValidateDestination() method will infer the cluster URL if the cluster name is present. Fixes: #<I>
argoproj_argo-cd
train
23b04ab0423b6fa81df3583ac4b8eae4cc537027
diff --git a/pylint_django/compat.py b/pylint_django/compat.py index <HASH>..<HASH> 100644 --- a/pylint_django/compat.py +++ b/pylint_django/compat.py @@ -22,10 +22,3 @@ except ImportError: except ImportError: from astroid.util import Uninferable -try: - django = __import__("django") - django_version = django.VERSION -except ImportError: - # if not available, will be handled by the django_installed checker - django_version = (1, 5) -
Remove unnecessary django_version compat. This is not really used, and elsewhere we do: from django import VERSION as django_version
PyCQA_pylint-django
train
483194df0e5a37025a3789dc7ea3bab1a20077c6
diff --git a/hazelcast/src/main/java/com/hazelcast/logging/LoggingServiceImpl.java b/hazelcast/src/main/java/com/hazelcast/logging/LoggingServiceImpl.java index <HASH>..<HASH> 100644 --- a/hazelcast/src/main/java/com/hazelcast/logging/LoggingServiceImpl.java +++ b/hazelcast/src/main/java/com/hazelcast/logging/LoggingServiceImpl.java @@ -30,7 +30,7 @@ import java.util.logging.LogRecord; import static com.hazelcast.util.ConcurrencyUtil.getOrPutIfAbsent; public class LoggingServiceImpl implements LoggingService { - private volatile MemberImpl thisMember; // = new MemberImpl(); + private volatile MemberImpl thisMember = new MemberImpl(); private final SystemLogService systemLogService; private final String groupName; private final CopyOnWriteArrayList<LogListenerRegistration> listeners
Added npe fix on LogEvent/AddressPicker
hazelcast_hazelcast
train
a92c77d4c8ee36b2bc93e6f97c1a9aee85aa731c
diff --git a/client/lib/posts/actions.js b/client/lib/posts/actions.js index <HASH>..<HASH> 100644 --- a/client/lib/posts/actions.js +++ b/client/lib/posts/actions.js @@ -409,7 +409,7 @@ PostActions = { trash: function( post, callback ) { var postHandle = wpcom.site( post.site_ID ).post( post.ID ); - postHandle.del( PostActions.receiveUpdate.bind( null, callback ) ); + postHandle.delete( PostActions.receiveUpdate.bind( null, callback ) ); }, /**
Fix for trashing posts and pages.
Automattic_wp-calypso
train
f118d58e4b46b0cf9a9a29eb1831c50406b74f27
diff --git a/lib/get/walkPath.js b/lib/get/walkPath.js index <HASH>..<HASH> 100644 --- a/lib/get/walkPath.js +++ b/lib/get/walkPath.js @@ -4,6 +4,7 @@ var onValue = require("./onValue"); var isExpired = require("./util/isExpired"); var iterateKeySet = require("falcor-path-utils").iterateKeySet; var $ref = require("./../types/ref"); +var __version = require("./../internal/version"); module.exports = function walkPath(model, root, curr, path, depth, seed, outerResults, branchInfo, requestedPath, @@ -92,11 +93,13 @@ module.exports = function walkPath(model, root, curr, path, depth, seed, // followed or the key that it took to get here. if (fromReference) { branchInfo[depth] = { + //__version: curr[__version], __path: refPath }; } else { branchInfo[depth] = { + //__version: curr[__version], __key: key, __parent: null };
added the spot for the version number.
Netflix_falcor
train
a0e93034d75d2b86b8f2f208288c199eb074483f
diff --git a/kuyruk/process.py b/kuyruk/process.py index <HASH>..<HASH> 100644 --- a/kuyruk/process.py +++ b/kuyruk/process.py @@ -85,7 +85,7 @@ class KuyrukProcess(object): logging.config.fileConfig(self.config.LOGGING_CONFIG) else: logging.getLogger('pika').level = logging.WARNING - level = getattr(logging, self.config.LOGGING_LEVEL) + level = getattr(logging, self.config.LOGGING_LEVEL.upper()) logging.basicConfig(level=level)
logging level may be in lower case
cenkalti_kuyruk
train
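The kuyruk patch makes level lookup case-insensitive by upper-casing before the getattr(). A small self-contained sketch; resolve_level() is a hypothetical helper, not the project's API.

```python
import logging


def resolve_level(name):
    # logging exposes levels as upper-case module attributes
    # (logging.DEBUG, logging.WARNING, ...), so normalize user input.
    level = getattr(logging, name.upper(), None)
    if not isinstance(level, int):
        raise ValueError('unknown logging level: %r' % name)
    return level


logging.basicConfig(level=resolve_level('debug'))  # lower case accepted
```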
94f521faa51890d9308f0ad80abc245f5edba287
diff --git a/packages/vuetifyjs.com/src/entry-server.js b/packages/vuetifyjs.com/src/entry-server.js index <HASH>..<HASH> 100644 --- a/packages/vuetifyjs.com/src/entry-server.js +++ b/packages/vuetifyjs.com/src/entry-server.js @@ -33,7 +33,7 @@ export default context => { Promise.all([ ...matchedComponents.map(component => { if (component.asyncData) { - component.asyncData({ + return component.asyncData({ store, route: router.currentRoute })
docs: fixed bug with asyncData
vuetifyjs_vuetify
train
3b838e47f96f74cff75a938450853bf0c36d2cf0
diff --git a/lib/kyotocabinet.rb b/lib/kyotocabinet.rb index <HASH>..<HASH> 100644 --- a/lib/kyotocabinet.rb +++ b/lib/kyotocabinet.rb @@ -292,6 +292,7 @@ module Java::Kyotocabinet self._set(k.to_java_bytes, v.to_s.to_java_bytes) end alias_method :[]=, :set + alias_method :store, :set alias_method :_set_bulk, :set_bulk def set_bulk(rec_h, atomic) diff --git a/spec/kyotocabinet_spec.rb b/spec/kyotocabinet_spec.rb index <HASH>..<HASH> 100644 --- a/spec/kyotocabinet_spec.rb +++ b/spec/kyotocabinet_spec.rb @@ -161,6 +161,13 @@ module KyotoCabinet end end + describe "#store" do + it "works like #set" do + @db.store("a", "b") + @db["a"].should == "b" + end + end + after(:each) do @db.close end
Added DB#store support.
csw_kyotocabinet-java
train
0e2b2fcbe625a9b939d4a4c8dca37264edf34b20
diff --git a/framework/db/Query.php b/framework/db/Query.php index <HASH>..<HASH> 100644 --- a/framework/db/Query.php +++ b/framework/db/Query.php @@ -356,7 +356,7 @@ class Query extends Component implements QueryInterface $this->limit = $limit; $this->offset = $offset; - if (empty($this->groupBy) && $this->distinct !== true) { + if (empty($this->groupBy) && !$this->distinct) { return $command->queryScalar(); } else { return (new Query)->select([$selectExpression])
Fix for logic Query::queryScalar uses to check for SELECT DISTINCT
yiisoft_yii2
train
47d8ae5fae7bb2bdbb24bec9341614926acff566
diff --git a/yowsup/layers/__init__.py b/yowsup/layers/__init__.py index <HASH>..<HASH> 100644 --- a/yowsup/layers/__init__.py +++ b/yowsup/layers/__init__.py @@ -32,7 +32,6 @@ class YowLayer(object): # self.setLayers(upperLayer, lowerLayer) def __init__(self): - super(YowLayer, self).__init__() self.setLayers(None, None) def setStack(self, stack):
Update __init__.py The redundant parent call was removed
tgalal_yowsup
train
e098c083c618f14918336f0bd4d71e1bda72c00b
diff --git a/pkg/deploy/client/printer.go b/pkg/deploy/client/printer.go index <HASH>..<HASH> 100644 --- a/pkg/deploy/client/printer.go +++ b/pkg/deploy/client/printer.go @@ -10,7 +10,7 @@ import ( "github.com/openshift/origin/pkg/deploy/api" ) -var deploymentColumns = []string{"ID", "Status"} +var deploymentColumns = []string{"ID", "Status", "Cause"} var deploymentConfigColumns = []string{"ID", "Triggers", "LatestVersion"} // RegisterPrintHandlers registers human-readable printers for deploy types. @@ -22,7 +22,14 @@ func RegisterPrintHandlers(printer *kubecfg.HumanReadablePrinter) { } func printDeployment(d *api.Deployment, w io.Writer) error { - _, err := fmt.Fprintf(w, "%s\t%s\n", d.ID, d.Status) + causes := util.StringSet{} + if d.Details != nil { + for _, cause := range d.Details.Causes { + causes.Insert(string(cause.Type)) + } + } + cStr := strings.Join(causes.List(), ", ") + _, err := fmt.Fprintf(w, "%s\t%s\t%s\n", d.ID, d.Status, cStr) return err }
Printing the deployment causes in the CLI output
openshift_origin
train
ca0da3f6eaed4f19b0b85679df517725db9c1d00
diff --git a/lxd-user/lxd.go b/lxd-user/lxd.go index <HASH>..<HASH> 100644 --- a/lxd-user/lxd.go +++ b/lxd-user/lxd.go @@ -155,7 +155,7 @@ func lxdSetupUser(uid uint32) error { if err != nil { return fmt.Errorf("Failed to create user directory: %w", err) } - revert.Add(func() { os.RemoveAll(userPath) }) + revert.Add(func() { _ = os.RemoveAll(userPath) }) // Generate certificate. err = shared.FindOrGenCert(filepath.Join(userPath, "client.crt"), filepath.Join(userPath, "client.key"), true, false) @@ -169,7 +169,7 @@ func lxdSetupUser(uid uint32) error { return fmt.Errorf("Unable to connect to LXD: %w", err) } - client.GetServer() + _, _, _ = client.GetServer() // Setup the project (with restrictions). projects, err := client.GetProjectNames() @@ -202,7 +202,7 @@ func lxdSetupUser(uid uint32) error { return fmt.Errorf("Unable to create project: %w", err) } - revert.Add(func() { client.DeleteProject(projectName) }) + revert.Add(func() { _ = client.DeleteProject(projectName) }) } // Parse the certificate. @@ -225,7 +225,7 @@ func lxdSetupUser(uid uint32) error { return fmt.Errorf("Unable to add user certificate: %w", err) } - revert.Add(func() { client.DeleteCertificate(shared.CertFingerprint(x509Cert)) }) + revert.Add(func() { _ = client.DeleteCertificate(shared.CertFingerprint(x509Cert)) }) // Setup default profile. err = client.UseProject(projectName).UpdateProfile("default", api.ProfilePut{ diff --git a/lxd-user/main_daemon.go b/lxd-user/main_daemon.go index <HASH>..<HASH> 100644 --- a/lxd-user/main_daemon.go +++ b/lxd-user/main_daemon.go @@ -1,6 +1,7 @@ package main import ( + "errors" "fmt" "net" "os" @@ -91,7 +92,10 @@ func (c *cmdDaemon) Run(cmd *cobra.Command, args []string) error { } else { // Create our own socket. unixPath := "unix.socket" - os.Remove(unixPath) + err := os.Remove(unixPath) + if err != nil && !errors.Is(err, os.ErrNotExist) { + return fmt.Errorf("Failed to delete pre-existing unix socket: %w", err) + } unixAddr, err := net.ResolveUnixAddr("unix", unixPath) if err != nil { diff --git a/lxd-user/proxy.go b/lxd-user/proxy.go index <HASH>..<HASH> 100644 --- a/lxd-user/proxy.go +++ b/lxd-user/proxy.go @@ -61,7 +61,7 @@ func proxyConnection(conn *net.UnixConn) { }() // Close on exit. - defer conn.Close() + defer func() { _ = conn.Close() }() // Get credentials. creds, err := ucred.GetCred(conn) @@ -102,7 +102,7 @@ func proxyConnection(conn *net.UnixConn) { log.Errorf("Unable to connect to target server: %v", err) return } - defer client.Close() + defer func() { _ = client.Close() }() // Get the TLS configuration tlsConfig, err := tlsConfig(creds.Uid) @@ -123,12 +123,12 @@ func proxyConnection(conn *net.UnixConn) { // Establish the TLS handshake. err = tlsClient.Handshake() if err != nil { - conn.Close() + _ = conn.Close() log.Errorf("Failed TLS handshake with target server: %v", err) return } // Start proxying. - go io.Copy(conn, tlsClient) - io.Copy(tlsClient, conn) + go func() { _, _ = io.Copy(conn, tlsClient) }() + _, _ = io.Copy(tlsClient, conn) }
lxd-user: Checks or explicitly ignores errors.
lxc_lxd
train
4c5f0f20ef1a98d3446799cd38148174236596ec
diff --git a/packages/cli-plugin-scaffold-app-graphql/index.js b/packages/cli-plugin-scaffold-app-graphql/index.js index <HASH>..<HASH> 100644 --- a/packages/cli-plugin-scaffold-app-graphql/index.js +++ b/packages/cli-plugin-scaffold-app-graphql/index.js @@ -15,7 +15,7 @@ const createPackageLocation = name => { return `${appsPluginsLocation}/${name}`; }; -const readApiPackageModelName = (file) => { +const readApiPackageEntityName = (file) => { const fileContent = fs .readFileSync(file, { encoding: "utf-8", @@ -29,25 +29,25 @@ const readApiPackageModelName = (file) => { const nameMatched = fileContent.match(/withName\("([a-zA-Z]+)"\)/); if(!nameMatched) { throw new Error( - `Could not find withName() in ${file} which is needed to detect model name.` + `Could not find withName() in ${file} which is needed to detect entity name.` ); } return nameMatched[1]; }; -const findDataModels = (location) => { +const findEntities = (location) => { const target = path.resolve(`${location}/**/*.model.ts`); const files = fastGlob .sync(target, { unique: true, }); if(files.length === 0) { - throw new Error(`Could not find any models with fast-glob pattern "${target}"`); + throw new Error(`Could not find any entities with fast-glob pattern "${target}"`); } return files.map(file => ({ fileName: path.basename(file), filePath: file, - modelName: readApiPackageModelName(file), + entityName: readApiPackageEntityName(file), })) }; @@ -67,44 +67,44 @@ module.exports = [ return true; } if(fs.existsSync(apiLocation) === false) { - return "There is no GraphQL API in given location"; + return "There is no GraphQL API in given location."; } try { - findDataModels(apiLocation); + findEntities(apiLocation); } catch (ex) { - return `Could not find existing API model in ${apiLocation}, error: ${ex.message}`; + return `Could not find existing API entity in ${apiLocation}, error: ${ex.message}.`; } return true; } }, { - name: "existingDataModelName", - message: "Choose data model to use", + name: "existingEntityName", + message: "Choose entity to use", type: "list", when: ({apiLocation}) => { return !!apiLocation; }, choices: ({apiLocation}) => { - const names = findDataModels(apiLocation); - return names.map(({modelName}) => modelName); + const names = findEntities(apiLocation); + return names.map(({entityName}) => entityName); }, validate: (name, {apiLocation}) => { if(!name) { - return "Please enter a data model name"; + return "Please enter a entity name."; } else if(!name.match(/^([a-z]+)$/i)) { return "A valid entity name must consist of letters only."; } try { - const names = findDataModels(apiLocation).map(({modelName}) => modelName); + const names = findEntities(apiLocation).map(({entityName}) => entityName); if(!names.includes(name)) { throw new Error(); } return true; } catch (ex) { - return `A data model with name "${name}" does not exist`; + return `A entity with name "${name}" does not exist.`; } } }, @@ -114,7 +114,7 @@ module.exports = [ default: "books", validate: name => { if(!name) { - return "Please enter a package location"; + return "Please enter a package location."; } const packageLocation = createPackageLocation(name); @@ -135,11 +135,11 @@ module.exports = [ } }, { - name: "dataModelName", - message: "Enter name of the data model", + name: "newEntityName", + message: "Enter name of the entity", default: "Book", - when: ({existingDataModelName}) => { - return !existingDataModelName; + when: ({existingEntityName}) => { + return !existingEntityName; }, validate: (name) => { if(!name.match(/^([a-z]+)$/i)) { @@ -151,9 +151,9 @@ module.exports = [ ]; }, generate: async ({input, oraSpinner}) => { - const {existingDataModelName, packageLocation, dataModelName} = input; + const {existingEntityName, packageLocation, newEntityName} = input; - const modelName = existingDataModelName || dataModelName; + const entityName = existingEntityName || newEntityName; const fullPackageLocation = path.resolve(createPackageLocation(packageLocation)); @@ -181,10 +181,10 @@ module.exports = [ // Copy template files await ncp(sourceFolder, fullPackageLocation); - // Replace generic "Entity" with received "input.existingDataModelName" or "input.dataModelName" argument. + // Replace generic "Entity" with received "input.existingEntityName" or "input.newEntityName" argument. const entity = { - plural: pluralize(Case.camel(modelName)), - singular: pluralize.singular(Case.camel(modelName)) + plural: pluralize(Case.camel(entityName)), + singular: pluralize.singular(Case.camel(entityName)) }; const codeReplacements = [
Switched all naming from "data model" to "entity"
Webiny_webiny-js
train
7b842f92847f19cba86390267663907f85223c81
diff --git a/lib/conjure/service/docker_host.rb b/lib/conjure/service/docker_host.rb index <HASH>..<HASH> 100644 --- a/lib/conjure/service/docker_host.rb +++ b/lib/conjure/service/docker_host.rb @@ -52,6 +52,10 @@ module Conjure result.stdout end + def clean_stopped_processes + command "rm `#{docker_path} ps -a -q`" + end + def shell_escape(text) text.gsub "'", "'\"'\"'" end @@ -133,6 +137,7 @@ module Conjure def build puts "[docker] Building #{@label} image" raise_build_errors(@host.command "build -t #{@label} -", stdin: dockerfile) + @host.clean_stopped_processes end def command(command, options = {})
Conserve memory by cleaning up docker processes after building each image
brianauton_conjure
train
7f74081dc863c7911b877cc6c8fa0f0376e462dc
diff --git a/loky/process_executor.py b/loky/process_executor.py index <HASH>..<HASH> 100644 --- a/loky/process_executor.py +++ b/loky/process_executor.py @@ -819,6 +819,7 @@ class ProcessPoolExecutor(_base.Executor): self._work_ids = queue.Queue() self._processes_management_lock = self._context.Lock() self._queue_management_thread = None + self._atexit = None # _ThreadWakeup is a communication channel used to interrupt the wait # of the main loop of queue_manager_thread from another thread (e.g. @@ -887,9 +888,26 @@ class ProcessPoolExecutor(_base.Executor): self._queue_management_thread.start() # register this executor in a mechanism that ensures it will wakeup - # when the interpreter is exiting. - _threads_wakeups[self._queue_management_thread] = \ - self._queue_management_thread_wakeup + # when the interpreter is exiting. Use an exitpriority of 20 to be + # called before the multiprocessing.Queue._close which have an + # exitpriority of 10. + self._atexit = mp.util.Finalize( + self, self._wakeup_executor_at_exit, + [weakref.ref(self._queue_management_thread), + weakref.ref(self._queue_management_thread_wakeup)], + exitpriority=20) + + @staticmethod + def _wakeup_executor_at_exit(qmt_wr, qmtw_wr): + global _global_shutdown + _global_shutdown = True + qmtw = qmtw_wr() + if qmtw is not None: + qmtw.wakeup() + qmt = qmt_wr() + if qmt is not None: + qmt.join() + mp.util.debug('... queue management thread joined') def _adjust_process_count(self): for _ in range(len(self._processes), self._max_workers): @@ -979,6 +997,13 @@ class ProcessPoolExecutor(_base.Executor): def shutdown(self, wait=True, kill_workers=False): mp.util.debug('shutting down executor %s' % self) + + # As we are manually shutting down, we do not need the atexit mechanism + # anymore. Cancel it to avoid growing the list of finalizers. + if self._atexit: + self._atexit.cancel() + self._atexit = None + self._flags.flag_as_shutting_down(kill_workers) qmt = self._queue_management_thread qmtw = self._queue_management_thread_wakeup @@ -1012,8 +1037,3 @@ class ProcessPoolExecutor(_base.Executor): # Can happen in case of concurrent calls to shutdown. pass shutdown.__doc__ = _base.Executor.shutdown.__doc__ - - -# Use an exitpriority of 20 to be called before the multiprocessing.Queue -# Finalize which have a exitpriority 10. -mp.util.Finalize(None, _python_exit, exitpriority=20)
FIX use a Finalize that correctly cleans up the ProcessPoolExecutor
tomMoral_loky
train
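The loky change swaps a module-level exit hook for a per-executor multiprocessing.util.Finalize that holds only weak references and can be cancelled on manual shutdown. A reduced sketch of that mechanism with a hypothetical Resource class standing in for the executor:

```python
import weakref
import multiprocessing.util as mp_util


class Resource(object):
    def __init__(self):
        # exitpriority orders finalizers at interpreter exit (higher runs
        # first); passing a weakref keeps the finalizer from pinning self.
        self._atexit = mp_util.Finalize(
            self, Resource._cleanup, [weakref.ref(self)], exitpriority=20)

    @staticmethod
    def _cleanup(self_wr):
        self = self_wr()
        if self is not None:
            print('cleaning up', self)

    def shutdown(self):
        # Manual shutdown cancels the hook so finalizers do not pile up.
        if self._atexit is not None:
            self._atexit.cancel()
            self._atexit = None
```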
754771e04d4a06844d7b0950cffb32f512c21b82
diff --git a/src/main/resources/META-INF/resources/primefaces/idlemonitor/1-idlemonitor.js b/src/main/resources/META-INF/resources/primefaces/idlemonitor/1-idlemonitor.js index <HASH>..<HASH> 100644 --- a/src/main/resources/META-INF/resources/primefaces/idlemonitor/1-idlemonitor.js +++ b/src/main/resources/META-INF/resources/primefaces/idlemonitor/1-idlemonitor.js @@ -18,7 +18,7 @@ PrimeFaces.widget.IdleMonitor = PrimeFaces.widget.BaseWidget.extend({ }) .on("active.idleTimer" + this.cfg.id, function(){ if($this.cfg.onactive) { - $this.cfg.onactive.call(this); + $this.cfg.onactive.call($this); } $this.callBehavior('active');
Fix #<I>: IdleMonitor onactive called with widget * IdleMonitor: Change the this context of the onactive callback to the widget instance.
primefaces_primefaces
train
defdd28bf84000d59d8455b8e1d74b46130f4e29
diff --git a/lib/jekyll_picture_tag/srcsets/basic.rb b/lib/jekyll_picture_tag/srcsets/basic.rb index <HASH>..<HASH> 100644 --- a/lib/jekyll_picture_tag/srcsets/basic.rb +++ b/lib/jekyll_picture_tag/srcsets/basic.rb @@ -86,11 +86,7 @@ module PictureTag end def source_width - @source_width ||= if PictureTag.crop(@media) - target_files.first.source_width - else - @source_image.width - end + source_image.width end def target_files
Remove crop logic from srcset
robwierzbowski_jekyll-picture-tag
train
c5e0babc7a418f91175642caaca9848c844124e9
diff --git a/CHANGELOG.md b/CHANGELOG.md index <HASH>..<HASH> 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,7 @@ CHANGELOG v0.9.3 * Use 'rule-book.js' and 'rule-book.json' as default values for a configuration file * Rename "decorator" to "decorators" in the task-options - no backwards compatibility yet +* Add error logging to cucumber and mocha hooks v0.9.2 11/10/14 * Add "report" property for task to turn on/off reporting for the task diff --git a/lib/task/client/cucumber.js b/lib/task/client/cucumber.js index <HASH>..<HASH> 100644 --- a/lib/task/client/cucumber.js +++ b/lib/task/client/cucumber.js @@ -31,6 +31,7 @@ var CucumberClient = AbstractClient.extend( self.processBefore().then(function () { callback(); }, function (err) { + console.error(err.stack); callback(err); }); }); @@ -38,6 +39,7 @@ var CucumberClient = AbstractClient.extend( self.processAfter().then(function () { callback(); }, function (err) { + console.error(err.stack); callback(err); }); }); @@ -46,6 +48,7 @@ var CucumberClient = AbstractClient.extend( self.processBeforeTest().then(function () { callback(); }, function (err) { + console.error(err.stack); callback(err); }); }); @@ -53,6 +56,7 @@ var CucumberClient = AbstractClient.extend( self.processAfterTest().then(function () { callback(); }, function (err) { + console.error(err.stack); callback(err); }); }); diff --git a/lib/task/client/mocha.js b/lib/task/client/mocha.js index <HASH>..<HASH> 100644 --- a/lib/task/client/mocha.js +++ b/lib/task/client/mocha.js @@ -37,6 +37,7 @@ var MochaClient = AbstractClient.extend( self.processBefore().then(function () { done(); }, function (err) { + console.error(err.stack); done(err); }); }); @@ -44,6 +45,7 @@ var MochaClient = AbstractClient.extend( self.processAfter().then(function () { done(); }, function (err) { + console.error(err.stack); done(err); }); }); @@ -51,6 +53,7 @@ var MochaClient = AbstractClient.extend( self.processBeforeTest().then(function () { done(); }, function (err) { + console.error(err.stack); done(err); }); }); @@ -58,6 +61,7 @@ var MochaClient = AbstractClient.extend( self.processAfterTest().then(function () { done(); }, function (err) { + console.error(err.stack); done(err); }); });
Add more error logging to client hooks
yahoo_preceptor
train
2818e52df256906608bced71779d617216b2d465
diff --git a/packages/diffhtml/lib/transaction.js b/packages/diffhtml/lib/transaction.js index <HASH>..<HASH> 100644 --- a/packages/diffhtml/lib/transaction.js +++ b/packages/diffhtml/lib/transaction.js @@ -290,7 +290,7 @@ export default class Transaction { // oldTree (current state) is solidified to not accidentially deallocate // something required. This allows VTrees to be reused quicker and reduce // memory overload. - { gc } + gc(); return this; }
Bring back the gc function, accidentally omitted
tbranyen_diffhtml
train
5e3a3d3c0aefc32bba98a6fad861430745c24dc3
diff --git a/tests.py b/tests.py index <HASH>..<HASH> 100644 --- a/tests.py +++ b/tests.py @@ -111,7 +111,7 @@ class TestTimeZoneBase(unittest.TestCase): def assertTimezoneEqual(self, actual, expected): # For UTC we check actual identity if expected is pytz.utc: - self.assertIs(pytz.utc, actual) + self.assertTrue(pytz.utc is actual) # For very simple timezones, we assert the timezones are equal. elif isinstance(expected, pytz._FixedOffset):
Remove assertIs for Python <I> support.
mithro_python-datetime-tz
train
b9efda60bec202a01e91468625c40a5876790b40
diff --git a/src/test/java/org/apache/zab/QuorumZabTest.java b/src/test/java/org/apache/zab/QuorumZabTest.java index <HASH>..<HASH> 100644 --- a/src/test/java/org/apache/zab/QuorumZabTest.java +++ b/src/test/java/org/apache/zab/QuorumZabTest.java @@ -1055,7 +1055,7 @@ public class QuorumZabTest extends TestBase { LOG.error("Interrupted!"); } throw new SimulatedException(String.format("%s crashed " - + "before synchronizing phase", server1)); + + "in broadcasting phase", server1)); } } }; diff --git a/src/test/java/org/apache/zab/TestBase.java b/src/test/java/org/apache/zab/TestBase.java index <HASH>..<HASH> 100644 --- a/src/test/java/org/apache/zab/TestBase.java +++ b/src/test/java/org/apache/zab/TestBase.java @@ -50,16 +50,25 @@ public class TestBase { protected void starting(Description description) { DummyTransport.clearMessageQueue(); LOG.info("STARTING: {}", description); + LOG.debug("Before {} : Number of threads {}", + description, + Thread.getAllStackTraces().keySet().size()); } @Override protected void failed(Throwable e, Description description) { LOG.error("FAILED: {}", description, e); + LOG.debug("After {} : Number of threads {}", + description, + Thread.getAllStackTraces().keySet().size()); } @Override protected void succeeded(Description description) { LOG.info("SUCCEEDED: {}", description); + LOG.debug("After {} : Number of threads {}", + description, + Thread.getAllStackTraces().keySet().size()); } };
Print out number of active threads before/after each test.
zk1931_jzab
train
264019d7bf6839d1c35f20ef73b9987aaaa803e2
diff --git a/multiqc/multiqc.py b/multiqc/multiqc.py index <HASH>..<HASH> 100644 --- a/multiqc/multiqc.py +++ b/multiqc/multiqc.py @@ -534,7 +534,7 @@ def run( if len(getattr(config, "run_modules", {})) > 0: run_modules = [m for m in run_modules if list(m.keys())[0] in config.run_modules] - logger.info("Only using modules {}".format(", ".join(config.run_modules))) + logger.info("Only using modules: {}".format(", ".join(config.run_modules))) elif modules_from_tags: run_modules = [m for m in run_modules if list(m.keys())[0] in modules_from_tags] logger.info("Only using modules with '{}' tag".format(", ".join(module_tag))) @@ -584,7 +584,7 @@ def run( # Get the list of files to search for d in config.analysis_dir: - logger.info("Searching : {}".format(os.path.abspath(d))) + logger.info("Search path : {}".format(os.path.abspath(d))) report.get_filelist(run_module_names) # Run the modules! diff --git a/multiqc/utils/log.py b/multiqc/utils/log.py index <HASH>..<HASH> 100644 --- a/multiqc/utils/log.py +++ b/multiqc/utils/log.py @@ -35,7 +35,7 @@ def init_log(logger, loglevel=0, no_ansi=False): # Logging templates debug_template = "[%(asctime)s] %(name)-50s [%(levelname)-7s] %(message)s" - info_template = "|%(module)15s | %(message)s" + info_template = "|%(module)18s | %(message)s" # Base level setup logger.setLevel(getattr(logging, "DEBUG")) diff --git a/multiqc/utils/report.py b/multiqc/utils/report.py index <HASH>..<HASH> 100644 --- a/multiqc/utils/report.py +++ b/multiqc/utils/report.py @@ -256,15 +256,16 @@ def get_filelist(run_module_names): # Search through collected files progress_obj = rich.progress.Progress( - "[progress.description]{task.description}", + "[blue]|[/] ", rich.progress.SpinnerColumn(), + "[blue]{task.description}[/] |", rich.progress.BarColumn(), "[progress.percentage]{task.percentage:>3.0f}%", "[green]{task.completed}/{task.total}", "[dim]{task.fields[s_fn]}", ) with progress_obj as progress: - mqc_task = progress.add_task("| [blue]searching[/] |", total=len(searchfiles), s_fn="") + mqc_task = progress.add_task("searching", total=len(searchfiles), s_fn="") for sf in searchfiles: progress.update(mqc_task, advance=1, s_fn=os.path.join(sf[1], sf[0])[-50:]) if not add_file(sf[0], sf[1]):
Move search spinner behind 'searching' text
ewels_MultiQC
train
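The MultiQC commit reorders rich progress-bar columns so the spinner precedes the "searching" label. Column order in rich.progress.Progress is purely positional, as this runnable toy shows (the loop stands in for the real file search):

```python
import time

import rich.progress

progress = rich.progress.Progress(
    '[blue]|[/] ',
    rich.progress.SpinnerColumn(),        # spinner now before the label
    '[blue]{task.description}[/] |',
    rich.progress.BarColumn(),
    '[progress.percentage]{task.percentage:>3.0f}%',
)

with progress:
    task = progress.add_task('searching', total=100)
    for _ in range(100):
        progress.update(task, advance=1)
        time.sleep(0.01)
```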
a0670c533968ac79a58b111af5a6c996ea6110de
diff --git a/error_default.go b/error_default.go index <HASH>..<HASH> 100644 --- a/error_default.go +++ b/error_default.go @@ -39,7 +39,11 @@ func (e *DefaultError) Wrap(err error) { } func (e *DefaultError) WithTrace(err error) *DefaultError { - e.Wrap(err) + if st := stackTracer(nil); !sdterr.As(e.err, &st) { + e.Wrap(errors.WithStack(err)) + } else { + e.Wrap(err) + } return e } diff --git a/json_test.go b/json_test.go index <HASH>..<HASH> 100644 --- a/json_test.go +++ b/json_test.go @@ -22,6 +22,7 @@ package herodot import ( "bytes" "encoding/json" + stderr "errors" "fmt" "io/ioutil" "net/http" @@ -57,6 +58,7 @@ func (s *statusCodeError) StatusCode() int { } func TestWriteError(t *testing.T) { + tracedErr := errors.New("err") for k, tc := range []struct { err error expect *DefaultError @@ -64,10 +66,20 @@ func TestWriteError(t *testing.T) { {err: exampleError, expect: exampleError}, {err: errors.WithStack(exampleError), expect: exampleError}, {err: onlyStatusCodeError, expect: &DefaultError{StatusField: http.StatusText(http.StatusNotFound), CodeField: http.StatusNotFound, ErrorField: "foo"}}, - // {err: errors.WithStack(onlyStatusCodeError), expect: &DefaultError{CodeField: http.StatusNotFound, ErrorField: "foo"}}, - // {err: errors.New("foo"), expect: &DefaultError{CodeField: http.StatusInternalServerError, ErrorField: "foo"}}, - // {err: errors.WithStack(errors.New("foo1")), expect: &DefaultError{CodeField: http.StatusInternalServerError, ErrorField: "foo1"}}, - // {err: stderr.New("foo1"), expect: &DefaultError{CodeField: http.StatusInternalServerError, ErrorField: "foo1"}}, + {err: errors.WithStack(onlyStatusCodeError), expect: &DefaultError{StatusField: http.StatusText(http.StatusNotFound), CodeField: http.StatusNotFound, ErrorField: "foo"}}, + {err: errors.New("foo"), expect: &DefaultError{StatusField: http.StatusText(http.StatusInternalServerError), CodeField: http.StatusInternalServerError, ErrorField: "foo"}}, + {err: errors.WithStack(errors.New("foo1")), expect: &DefaultError{StatusField: http.StatusText(http.StatusInternalServerError), CodeField: http.StatusInternalServerError, ErrorField: "foo1"}}, + {err: stderr.New("foo1"), expect: &DefaultError{StatusField: http.StatusText(http.StatusInternalServerError), CodeField: http.StatusInternalServerError, ErrorField: "foo1"}}, + { + err: ErrInternalServerError.WithTrace(tracedErr).WithReasonf("Unable to prepare JSON Schema for HTTP Post Body Form parsing: %s", tracedErr).WithDebugf("%+v", tracedErr), + expect: &DefaultError{ + ReasonField: fmt.Sprintf("Unable to prepare JSON Schema for HTTP Post Body Form parsing: %s", tracedErr), + StatusField: http.StatusText(http.StatusInternalServerError), + CodeField: http.StatusInternalServerError, + ErrorField: "An internal server error occurred, please contact the system administrator", + DebugField: fmt.Sprintf("%+v", tracedErr), + }, + }, } { t.Run(fmt.Sprintf("case=%d", k), func(t *testing.T) { var j jsonError
fix: fix broken wrap function (#<I>)
ory_herodot
train
157745e6df981d60086d9c747931f620f301dea6
diff --git a/test/runner/runner.go b/test/runner/runner.go index <HASH>..<HASH> 100644 --- a/test/runner/runner.go +++ b/test/runner/runner.go @@ -539,11 +539,11 @@ func (r *Runner) updateStatus(b *Build, state, targetUrl string) { req.Header.Set("Authorization", "token "+r.githubToken) res, err := http.DefaultClient.Do(req) - defer res.Body.Close() if err != nil { log.Printf("updateStatus: could not send request: %s\n", err) return } + res.Body.Close() if res.StatusCode != 201 { log.Printf("updateStatus: request failed: %d\n", res.StatusCode) }
test: Prevent panic when posting GitHub status
flynn_flynn
train
d7c166532216596271fc530351c0e2f63676f2b7
diff --git a/generators/common/files.go b/generators/common/files.go index <HASH>..<HASH> 100644 --- a/generators/common/files.go +++ b/generators/common/files.go @@ -7,14 +7,17 @@ import ( "strings" ) +// File represents the file to be templated type File struct { ReadPath string WritePath string Body string } +// Files is a slice of File type Files []File +// Find all the .tmpl files inside the buffalo GOPATH func Find(path string) (Files, error) { root := filepath.Join(os.Getenv("GOPATH"), "src", "github.com", "gobuffalo", "buffalo", "generators", path) files := Files{} diff --git a/generators/newapp/new.go b/generators/newapp/new.go index <HASH>..<HASH> 100644 --- a/generators/newapp/new.go +++ b/generators/newapp/new.go @@ -10,6 +10,7 @@ import ( sg "github.com/markbates/pop/soda/cmd/generate" ) +// App is the representation of a new Buffalo application type App struct { RootPath string Name string @@ -22,6 +23,7 @@ type App struct { CIProvider string } +// Generator returns a generator to create a new application func (a *App) Generator(data gentronics.Data) (*gentronics.Generator, error) { g := gentronics.New() files, err := common.Find("newapp") @@ -49,13 +51,13 @@ func (a *App) Generator(data gentronics.Data) (*gentronics.Generator, error) { g.Add(gentronics.NewCommand(generate.GoInstall("github.com/motemen/gore"))) g.Add(generate.NewWebpackGenerator(data)) g.Add(newSodaGenerator()) - g.Add(gentronics.NewCommand(a.GoGet())) + g.Add(gentronics.NewCommand(a.goGet())) g.Add(generate.Fmt) return g, nil } -func (a App) GoGet() *exec.Cmd { +func (a App) goGet() *exec.Cmd { appArgs := []string{"get", "-t"} if a.Verbose { appArgs = append(appArgs, "-v") diff --git a/generators/refresh/refresh.go b/generators/refresh/refresh.go index <HASH>..<HASH> 100644 --- a/generators/refresh/refresh.go +++ b/generators/refresh/refresh.go @@ -2,6 +2,7 @@ package refresh import "github.com/markbates/gentronics" +// New generator for a .buffalo.dev.yml file func New() *gentronics.Generator { g := gentronics.New()
make travis/code climate happy
gobuffalo_buffalo
train
39d57549226f3df07eaf476626baeb215bae8fda
diff --git a/yandextank/core/tankcore.py b/yandextank/core/tankcore.py index <HASH>..<HASH> 100644 --- a/yandextank/core/tankcore.py +++ b/yandextank/core/tankcore.py @@ -207,6 +207,8 @@ class TankCore(object): self.lock_file = None self.flush_config_to = None self.lock_dir = None + self.taskset_path = None + self.taskset_cpu = None self.set_option(self.SECTION, self.PID_OPTION, str(os.getpid())) def get_available_options(self): @@ -242,6 +244,8 @@ class TankCore(object): self.SECTION, "artifacts_base_dir", self.artifacts_base_dir) self.artifacts_base_dir = os.path.expanduser(base_dir) self.artifacts_dir = self.get_option(self.SECTION, "artifacts_dir", "") + self.taskset_path = self.get_option(self.SECTION, 'taskset_path', 'taskset') + self.taskset_cpu = self.get_option(self.SECTION, 'taskset_cpu', '') options = self.config.get_options(self.SECTION, self.PLUGIN_PREFIX) for (plugin_name, plugin_path) in options: @@ -263,6 +267,8 @@ class TankCore(object): os.chmod(self.artifacts_base_dir, 0755) self.log.info("Configuring plugins...") + if self.taskset_cpu != '': + self.taskset(os.getpid(), self.taskset_path, self.taskset_cpu) for plugin in self.plugins: self.log.debug("Configuring %s", plugin) plugin.configure() @@ -359,6 +365,17 @@ class TankCore(object): return retcode + def taskset(self, pid, path, cpu): + if cpu != '': + args = "%s -pc %s %s" % (path, cpu, pid) + retcode, stdout, stderr = execute( + args, shell=True, poll_period=0.1, catch_out=True) + self.log.debug('taskset stdout: %s', stdout) + if retcode != 0: + raise KeyError(stderr) + else: + self.log.info("Enabled taskset for pid %s with affinity %s", str(pid), cpu) + def __collect_artifacts(self): self.log.debug("Collecting artifacts") if not self.artifacts_dir:
Added taskset for tankcore
yandex_yandex-tank
train
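The yandex-tank commit shells out to taskset to pin the tank process to the configured CPU cores. A sketch of the same call using the standard library instead of the project's execute() helper; Linux-only, and it assumes the util-linux taskset binary is on PATH.

```python
import os
import subprocess


def pin_to_cpus(pid, cpu_list, taskset_path='taskset'):
    # Equivalent to the shell call: taskset -pc <cpu_list> <pid>
    result = subprocess.run(
        [taskset_path, '-pc', cpu_list, str(pid)],
        capture_output=True, text=True)
    if result.returncode != 0:
        raise RuntimeError(result.stderr)
    return result.stdout


print(pin_to_cpus(os.getpid(), '0-1'))  # pin self to cores 0 and 1
```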
851466c37fcbbb02606f93d2e027345fb8f1b0d6
diff --git a/src/JWTGuard.php b/src/JWTGuard.php index <HASH>..<HASH> 100644 --- a/src/JWTGuard.php +++ b/src/JWTGuard.php @@ -166,7 +166,7 @@ class JWTGuard implements Guard public function attempt(array $credentials = []) { $this->lastAttempted = $user = $this->provider->retrieveByCredentials($credentials); - if ($this->hasValidCredentials($user, $credentials)) { + if (!is_null($user) && $this->hasValidCredentials($user, $credentials)) { return $this->login($user); } return false;
bug fix: check if valid instance of user is returned from retrieveByCredentials() before passing to hasValidCredentials()
gboyegadada_lumen-jwt
train
bb9e90d5131ad9376112e03f9372fef94f108fd2
diff --git a/src/service/__init__.py b/src/service/__init__.py index <HASH>..<HASH> 100644 --- a/src/service/__init__.py +++ b/src/service/__init__.py @@ -202,12 +202,12 @@ class Service(object): # handlers for signals will be passed a integer value representing the # signal. Pyhton 3 defines signals as ``enum.Enum`` objects, but the # integer value of the enum must be stored to retrieve the event later. - self.signal_state = { + self.signal_events = { int(signal.SIGTERM): threading.Event() } if custom_signals is not None: for sig_symbol in custom_signals: - self.signal_state[int(sig_symbol)] = threading.Event() + self.signal_events[int(sig_symbol)] = threading.Event() self.logger = logging.getLogger(name) if not self.logger.handlers: self.logger.addHandler(logging.NullHandler()) @@ -271,16 +271,19 @@ class Service(object): def send_signal(self, sig_symbol): """ - Sends an arbitrary operating system signal to the daemon process. + Sends an preconfigured operating system signal to the daemon process. - Does not check if the signal is configured. Returns ``True`` if the - signal is configured, else ``False`` + Returns ``True`` if the signal is configured, else ``False`` """ - pid = self.get_pid() - if not pid: - raise ValueError('Daemon is not running.') - os.kill(pid, sig_symbol) - return int(sig_symbol) in self.signal_state + if int(sig_symbol) in self.signal_events: + pid = self.get_pid() + if not pid: + raise ValueError('Daemon is not running.') + os.kill(pid, sig_symbol) + return True + else: + return False + def got_signal(self, sig_symbol, clear=False): """ @@ -298,8 +301,8 @@ class Service(object): from the daemon process or if the signal is not configured """ sig_num = int(sig_symbol) - if sig_num in self.signal_state: - state = self.signal_state[sig_num].is_set() + if sig_num in self.signal_events: + state = self.signal_events[sig_num].is_set() if clear: self.clear_signal(sig_num) return state @@ -313,8 +316,8 @@ class Service(object): Returns ``True`` signal is configured, else ``False`` """ sig_num = int(sig_symbol) - if sig_num in self.signal_state: - self.signal_state[sig_num].clear() + if sig_num in self.signal_events: + self.signal_events[sig_num].clear() return True else: return False @@ -338,8 +341,8 @@ class Service(object): timeout) when it is not called from the daemon process. """ sig_num = int(sig_symbol) - if sig_num in self.signal_state: - return self.signal_state[sig_num].wait(timeout) + if sig_num in self.signal_events: + return self.signal_events[sig_num].wait(timeout) else: return False @@ -481,7 +484,7 @@ class Service(object): def on_signal(signum, frame): self._debug('Received %s signal' % signum) self._debug(type(signum)) - self.signal_state[int(signum)].set() + self.signal_events[int(signum)].set() def runner(): try: @@ -510,7 +513,7 @@ class Service(object): files_preserve = (self.files_preserve + self._get_logger_file_handles()) signal_map = dict() - for signum in self.signal_state: + for signum in self.signal_events: signal_map[signum] = on_signal dont_capture = { signal.SIGTTIN: None,
changed ``send_signal`` to only process preconfigured signals; also changed some wording
torfsen_service
train
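The service library keeps one threading.Event per configured signal, keyed by int(signum) because Python 3 turns signal numbers into enum members. A POSIX-only sketch of the same bookkeeping, using SIGUSR1 as the example signal:

```python
import os
import signal
import threading

# int() normalizes py3's enum members to the plain ints py2 used.
signal_events = {int(signal.SIGUSR1): threading.Event()}


def _on_signal(signum, frame):
    signal_events[int(signum)].set()


signal.signal(signal.SIGUSR1, _on_signal)
os.kill(os.getpid(), signal.SIGUSR1)

# wait() doubles as a timed "got_signal" check; True once the event is set.
assert signal_events[int(signal.SIGUSR1)].wait(timeout=1.0)
```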
4f380a106b09d074dc4f034102ddbd91cc15c67e
diff --git a/Tests/Entity/Parameters/BodyParameterTest.php b/Tests/Entity/Parameters/BodyParameterTest.php index <HASH>..<HASH> 100644 --- a/Tests/Entity/Parameters/BodyParameterTest.php +++ b/Tests/Entity/Parameters/BodyParameterTest.php @@ -59,6 +59,9 @@ class BodyParameterTest extends \PHPUnit_Framework_TestCase 'name' => 'foo', 'description' => 'bar', 'required' => false, + 'schema' => [ + 'type' => 'string' + ] ]); $parameter = $this->getSerializer()->deserialize($data, AbstractParameter::class, 'json');
Adjusted serialization test to fail on nested schema
epfremmer_swagger-php
train
6de993b4688a94ff7e6ff822a43b9bb3e54a8595
diff --git a/embed/config.go b/embed/config.go index <HASH>..<HASH> 100644 --- a/embed/config.go +++ b/embed/config.go @@ -160,7 +160,7 @@ func NewConfig() *Config { } func ConfigFromFile(path string) (*Config, error) { - cfg := &configYAML{} + cfg := &configYAML{Config: *NewConfig()} if err := cfg.configFromFile(path); err != nil { return nil, err }
embed: load config defaults before loading config from file
etcd-io_etcd
train
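The etcd fix seeds the config struct with NewConfig() defaults before unmarshalling the file, so fields the file leaves unset keep their defaults instead of zero values. The pattern translated to a Python sketch (hypothetical keys and file path; the original is Go):

```python
import yaml

DEFAULTS = {'name': 'default', 'snapshot-count': 100000}


def config_from_file(path):
    # Start fully populated, then overlay only the keys the file sets.
    cfg = dict(DEFAULTS)
    with open(path) as fh:
        cfg.update(yaml.safe_load(fh) or {})
    return cfg
```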
24c9e3b48c043763eecd09e23b3799754e211a8d
diff --git a/lib/countries/data.rb b/lib/countries/data.rb index <HASH>..<HASH> 100644 --- a/lib/countries/data.rb +++ b/lib/countries/data.rb @@ -7,7 +7,7 @@ module ISO3166 end def call - cache(@alpha2) + CACHE[@alpha2] end def self.codes @@ -16,11 +16,6 @@ module ISO3166 private - def cache(alpha2) - @@data ||= Data.load_marshal(['cache', "countries"]) - @@data[alpha2] - end - def self.datafile_path(file_array) File.join([File.dirname(__FILE__), '..'] + file_array) end @@ -29,8 +24,6 @@ module ISO3166 YAML.load_file(datafile_path(file_array)) end - def self.load_marshal(file_array) - Marshal.load(File.binread(datafile_path(file_array))) - end + CACHE = Marshal.load(File.binread(Data.datafile_path(%w(cache countries)))) end end
Go back to constant cache on boot, as size of cache is no longer an issue.
hexorx_countries
train
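The countries gem moves from lazily memoized loading back to a constant populated once at load time, trading a little startup cost for plain-hash lookups afterwards. The same trade-off in Python, with a hypothetical JSON data file standing in for the Marshal cache:

```python
import json

# Eager module-level cache: built once at import; every lookup afterwards
# is a dict access, with no per-call memoization check.
with open('countries.json') as fh:  # hypothetical path
    _CACHE = json.load(fh)


def lookup(alpha2):
    return _CACHE.get(alpha2)
```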
26fd77c4f2d9d1c8299cca65c948d4265c3f920d
diff --git a/src/auth/PasswordlessAuthenticator.js b/src/auth/PasswordlessAuthenticator.js index <HASH>..<HASH> 100644 --- a/src/auth/PasswordlessAuthenticator.js +++ b/src/auth/PasswordlessAuthenticator.js @@ -4,6 +4,11 @@ var ArgumentError = require('../exceptions').ArgumentError; var RestClient = require('rest-facade').Client; +/** + * @class + * Handles authenticator with passwordless flows, e.g. SMS, Touch ID, etc. + * @constructor + */ var PasswordlessAuthenticator = function (options, oauth) { if (!options) { throw new ArgumentError('Missing authenticator options'); @@ -21,6 +26,12 @@ var PasswordlessAuthenticator = function (options, oauth) { }; +/** + * Sign in with the given user credentials. + * + * @method + * @memberOf PasswordlessAuthenticator + */ PasswordlessAuthenticator.prototype.signIn = function (userData, cb) { var defaultFields = { client_id: this.clientId @@ -49,6 +60,12 @@ PasswordlessAuthenticator.prototype.signIn = function (userData, cb) { }; +/** + * Start passwordless flow sending an email. + * + * @method + * @memberOf PasswordlessAuthenticator + */ PasswordlessAuthenticator.prototype.sendEmail = function (userData, cb) { var defaultFields = { client_id: this.clientId @@ -80,6 +97,12 @@ PasswordlessAuthenticator.prototype.sendEmail = function (userData, cb) { }; +/** + * Start passwordless flow sending an SMS. + * + * @method + * @memberOf PasswordlessAuthenticator + */ PasswordlessAuthenticator.prototype.sendSMS = function (userData, cb) { var defaultFields = { client_id: this.clientId
Add comments for the PasswordlessAuthenticator class
auth0_node-auth0
train
07347e7de41357616969685fb7410da4c8d4e198
diff --git a/network/src/main/java/org/jboss/as/network/ClientMapping.java b/network/src/main/java/org/jboss/as/network/ClientMapping.java index <HASH>..<HASH> 100644 --- a/network/src/main/java/org/jboss/as/network/ClientMapping.java +++ b/network/src/main/java/org/jboss/as/network/ClientMapping.java @@ -21,6 +21,7 @@ */ package org.jboss.as.network; +import java.io.Serializable; import java.net.InetAddress; /** @@ -30,7 +31,7 @@ import java.net.InetAddress; * * @author Jason T. Greene */ -public class ClientMapping { +public class ClientMapping implements Serializable { private final InetAddress sourceNetworkAddress; private final byte sourceNetworkMaskBits; private final String destinationAddress;
Mark ClientMapping as serializable since it gets passed around in a clustered cache (and the ClientMapping contents are themselves Serializable anyway) was: <I>f<I>f8ed<I>d5f<I>b<I>b5c<I>d5
wildfly_wildfly-core
train
7b6125c40800104da9fead6ed518f59d8d1b11f6
diff --git a/python/phonenumbers/__init__.py b/python/phonenumbers/__init__.py index <HASH>..<HASH> 100644 --- a/python/phonenumbers/__init__.py +++ b/python/phonenumbers/__init__.py @@ -146,7 +146,7 @@ from .phonenumbermatcher import PhoneNumberMatch, PhoneNumberMatcher, Leniency # Version number is taken from the upstream libphonenumber version # together with an indication of the version of the Python-specific code. -__version__ = "8.12.3" +__version__ = "8.12.4" __all__ = ['PhoneNumber', 'CountryCodeSource', 'FrozenPhoneNumber', 'REGION_CODE_FOR_NON_GEO_ENTITY', 'NumberFormat', 'PhoneNumberDesc', 'PhoneMetadata',
Prep for <I> release
daviddrysdale_python-phonenumbers
train
1b53b1e99979f8e4a1065424f48a3f31f339b2af
diff --git a/src/runTests.spec.js b/src/runTests.spec.js index <HASH>..<HASH> 100644 --- a/src/runTests.spec.js +++ b/src/runTests.spec.js @@ -1,4 +1,5 @@ import { omit } from 'rambda' +import { delay } from './delay' import { runTests } from './runTests' const whenTrue = { @@ -18,6 +19,7 @@ const singleCase = { t : 'number', f : 'boolean', } + const runTestsInput = { testSuite : 'foo', data : [ { singleCase } ], @@ -35,3 +37,26 @@ test('missing `testSuite`', () => { }) runTests(runTestsInput) + +const singleCaseAsync = { + foo : 1, + t : 'RAMBDAX_DELAY', + f : 2, +} + +const whenTrueAsync = { + label : '{{tag}} hey hey what can I do', + whenTrue : async x => { + const result = await delay(x.foo) + + expect(result).toBe(x.t) + }, +} + +const runTestsInputAsync = { + testSuite : 'foo', + data : [ { singleCaseAsync } ], + evaluations : [ whenTrueAsync ], +} + +runTests(runTestsInputAsync)
test: runTests when async
selfrefactor_rambdax
train
c92aa1afc62067956c649779ffc0e8dc8816db3b
diff --git a/cmd/kubeadm/app/phases/upgrade/policy.go b/cmd/kubeadm/app/phases/upgrade/policy.go index <HASH>..<HASH> 100644 --- a/cmd/kubeadm/app/phases/upgrade/policy.go +++ b/cmd/kubeadm/app/phases/upgrade/policy.go @@ -118,8 +118,7 @@ func EnforceVersionPolicies(versionGetter VersionGetter, newK8sVersionStr string if kubeadmVersion.Major() > newK8sVersion.Major() || kubeadmVersion.Minor() > newK8sVersion.Minor() { - skewErrors.Mandatory = append(skewErrors.Mandatory, fmt.Errorf("Kubeadm version %s can only be used to upgrade to Kubernetes versions %d.%d", kubeadmVersionStr, kubeadmVersion.Major(), kubeadmVersion.Minor())) - + skewErrors.Skippable = append(skewErrors.Skippable, fmt.Errorf("Kubeadm version %s can only be used to upgrade to Kubernetes version %d.%d", kubeadmVersionStr, kubeadmVersion.Major(), kubeadmVersion.Minor())) } // Detect if the version is unstable and the user didn't allow that diff --git a/cmd/kubeadm/app/phases/upgrade/policy_test.go b/cmd/kubeadm/app/phases/upgrade/policy_test.go index <HASH>..<HASH> 100644 --- a/cmd/kubeadm/app/phases/upgrade/policy_test.go +++ b/cmd/kubeadm/app/phases/upgrade/policy_test.go @@ -75,7 +75,8 @@ func TestEnforceVersionPolicies(t *testing.T) { kubeadmVersion: "v1.10.3", }, newK8sVersion: "v1.9.10", - expectedMandatoryErrs: 2, // version must be higher than v1.10.0, can't upgrade old k8s with newer kubeadm + expectedMandatoryErrs: 1, // version must be higher than v1.10.0 + expectedSkippableErrs: 1, // can't upgrade old k8s with newer kubeadm }, { name: "upgrading two minor versions in one go is not supported", @@ -96,7 +97,8 @@ func TestEnforceVersionPolicies(t *testing.T) { kubeadmVersion: "v1.12.0", }, newK8sVersion: "v1.10.3", - expectedMandatoryErrs: 2, // can't downgrade two minor versions, can't upgrade old k8s with newer kubeadm + expectedMandatoryErrs: 1, // can't downgrade two minor versions + expectedSkippableErrs: 1, // can't upgrade old k8s with newer kubeadm }, { name: "kubeadm version must be higher than the new kube version. However, patch version skews may be forced", @@ -187,7 +189,7 @@ func TestEnforceVersionPolicies(t *testing.T) { kubeadmVersion: "v1.11.0", }, newK8sVersion: "v1.10.6", - expectedMandatoryErrs: 1, // can't upgrade old k8s with newer kubeadm + expectedSkippableErrs: 1, // can't upgrade old k8s with newer kubeadm }, }
Kubeadm/k8s version mismatch is now a skippable error
kubernetes_kubernetes
train
b73be20949ef67aca36ef17f11ddaf9ab45ad3a0
diff --git a/src/main/java/com/bericotech/clavin/gazetteer/GeoName.java b/src/main/java/com/bericotech/clavin/gazetteer/GeoName.java index <HASH>..<HASH> 100644 --- a/src/main/java/com/bericotech/clavin/gazetteer/GeoName.java +++ b/src/main/java/com/bericotech/clavin/gazetteer/GeoName.java @@ -237,13 +237,20 @@ public class GeoName { // ensure this is never null this.alternateNames = Collections.EMPTY_LIST; } - this.preferredName = preferredName != null && !preferredName.trim().isEmpty() ? preferredName.trim() : null; this.latitude = latitude; this.longitude = longitude; + this.primaryCountryCode = primaryCountryCode; + String pccName = primaryCountryCode != null ? primaryCountryCode.name : ""; + if (alternateCountryCodes != null) { + // defensive copy + this.alternateCountryCodes = Collections.unmodifiableList(new ArrayList<CountryCode>(alternateCountryCodes)); + } else { + // ensure this is never null + this.alternateCountryCodes = Collections.EMPTY_LIST; + } this.featureClass = featureClass; + // configure the feature code so top-level territories are distinguishable if (featureCode == FeatureCode.TERR) { - // configure the feature code so top-level territories are distinguishable - String pccName = primaryCountryCode != null ? primaryCountryCode.name : ""; boolean topLevel = (this.name != null && !this.name.isEmpty() && this.name.equals(pccName)) || (this.asciiName != null && !this.asciiName.isEmpty() && this.asciiName.equals(pccName)) || this.alternateNames.contains(pccName); @@ -251,14 +258,18 @@ public class GeoName { } else { this.featureCode = featureCode; } - this.primaryCountryCode = primaryCountryCode; - if (alternateCountryCodes != null) { - // defensive copy - this.alternateCountryCodes = Collections.unmodifiableList(new ArrayList<CountryCode>(alternateCountryCodes)); + // if this is a top level division, use the primary country name as the preferred name; otherwise + // use the name provided or null + boolean usePcc = TOP_LEVEL_FEATURES.contains(featureCode) && !pccName.isEmpty() && + ((this.name != null && !this.name.isEmpty() && this.name.equals(pccName)) || + (this.asciiName != null && !this.asciiName.isEmpty() && this.asciiName.equals(pccName)) || + this.alternateNames.contains(pccName)); + if (usePcc) { + this.preferredName = pccName; } else { - // ensure this is never null - this.alternateCountryCodes = Collections.EMPTY_LIST; + this.preferredName = preferredName != null && !preferredName.trim().isEmpty() ? preferredName.trim() : null; } + this.admin1Code = admin1Code; this.admin2Code = admin2Code; this.admin3Code = admin3Code;
Updated configuration of preferredName so top-level administrative divisions (countries, territories) use the name of their primary CountryCode as the preferredName instead of values from the alternateNames database.
Berico-Technologies_CLAVIN
train
85cce2432e476e2245c079cc2f4d5c8eca91f262
diff --git a/src/Repository/CraftingCategoryRepository.php b/src/Repository/CraftingCategoryRepository.php index <HASH>..<HASH> 100644 --- a/src/Repository/CraftingCategoryRepository.php +++ b/src/Repository/CraftingCategoryRepository.php @@ -19,6 +19,26 @@ use Ramsey\Uuid\UuidInterface; */ class CraftingCategoryRepository extends AbstractIdRepositoryWithOrphans { + /** + * Finds the crafting categories with the specified names. + * @param array<string>|string[] $names + * @return array<CraftingCategory>|CraftingCategory[] + */ + public function findByNames(array $names): array + { + if (count($names) === 0) { + return []; + } + + $queryBuilder = $this->entityManager->createQueryBuilder(); + $queryBuilder->select('cc') + ->from(CraftingCategory::class, 'cc') + ->where('cc.name IN (:names)') + ->setParameter('names', array_values($names)); + + return $queryBuilder->getQuery()->getResult(); + } + protected function getEntityClass(): string { return CraftingCategory::class; diff --git a/test/src/Repository/CraftingCategoryRepositoryTest.php b/test/src/Repository/CraftingCategoryRepositoryTest.php index <HASH>..<HASH> 100644 --- a/test/src/Repository/CraftingCategoryRepositoryTest.php +++ b/test/src/Repository/CraftingCategoryRepositoryTest.php @@ -5,6 +5,7 @@ declare(strict_types=1); namespace FactorioItemBrowserTest\Api\Database\Repository; use BluePsyduck\TestHelper\ReflectionTrait; +use Doctrine\ORM\AbstractQuery; use Doctrine\ORM\EntityManagerInterface; use Doctrine\ORM\QueryBuilder; use FactorioItemBrowser\Api\Database\Entity\CraftingCategory; @@ -41,6 +42,70 @@ class CraftingCategoryRepositoryTest extends TestCase } /** + * Tests the findByNames method. + * @covers ::findByNames + */ + public function testFindByNames(): void + { + $names = ['abc', 'def']; + $queryResult = [ + $this->createMock(CraftingCategory::class), + $this->createMock(CraftingCategory::class), + ]; + + $query = $this->createMock(AbstractQuery::class); + $query->expects($this->once()) + ->method('getResult') + ->willReturn($queryResult); + + $queryBuilder = $this->createMock(QueryBuilder::class); + $queryBuilder->expects($this->once()) + ->method('select') + ->with($this->identicalTo('cc')) + ->willReturnSelf(); + $queryBuilder->expects($this->once()) + ->method('from') + ->with($this->identicalTo(CraftingCategory::class), $this->identicalTo('cc')) + ->willReturnSelf(); + $queryBuilder->expects($this->once()) + ->method('where') + ->with($this->identicalTo('cc.name IN (:names)')) + ->willReturnSelf(); + $queryBuilder->expects($this->once()) + ->method('setParameter') + ->with($this->identicalTo('names'), $this->identicalTo($names)) + ->willReturnSelf(); + $queryBuilder->expects($this->once()) + ->method('getQuery') + ->willReturn($query); + + $this->entityManager->expects($this->once()) + ->method('createQueryBuilder') + ->willReturn($queryBuilder); + + $repository = new CraftingCategoryRepository($this->entityManager); + $result = $repository->findByNames($names); + + $this->assertSame($queryResult, $result); + } + + /** + * Tests the findByNames method. + * @covers ::findByNames + */ + public function testFindByNamesWithoutNames(): void + { + $this->entityManager->expects($this->never()) + ->method('createQueryBuilder'); + + $repository = new CraftingCategoryRepository($this->entityManager); + $result = $repository->findByNames([]); + + $this->assertSame([], $result); + } + + + /** * Tests the getEntityClass method. * @throws ReflectionException * @covers ::getEntityClass
Added method findByNames to the CraftingCategoryRepository.
factorio-item-browser_api-database
train
f98ae355693914e45c1c92b510ee30bfedba95f3
diff --git a/packages/stylelint-config-swissquote/src/utils/__tests__/resolveNestedSelector.js b/packages/stylelint-config-swissquote/src/utils/__tests__/resolveNestedSelector.js
index <HASH>..<HASH> 100644
--- a/packages/stylelint-config-swissquote/src/utils/__tests__/resolveNestedSelector.js
+++ b/packages/stylelint-config-swissquote/src/utils/__tests__/resolveNestedSelector.js
@@ -191,3 +191,23 @@ test("Works on compound selector concatenating class names", async t => {
  `);
  t.deepEqual(result, [[".Parent--before"], [".Parent--after"]]);
 });
+
+test("Nesting and multiple selectors", async t => {
+  const result = await postcssProcess(`
+    .Button, [dir="rtl"] .Button, .Button2 {
+      .Button__icon, .Button__icon2 {
+        &:first-child:last-child {
+          margin: var(--Button--icon-margin) calc(-1 * var(--Button--icon-size));
+        }
+      }
+    }
+  `);
+  t.deepEqual(result, [
+    [".Button", ".Button__icon:first-child:last-child"],
+    ['[dir="rtl"] .Button', ".Button__icon:first-child:last-child"],
+    [".Button2", ".Button__icon:first-child:last-child"],
+    [".Button", ".Button__icon2:first-child:last-child"],
+    ['[dir="rtl"] .Button', ".Button__icon2:first-child:last-child"],
+    [".Button2", ".Button__icon2:first-child:last-child"]
+  ]);
+});
diff --git a/packages/stylelint-config-swissquote/src/utils/resolveNestedSelector.js b/packages/stylelint-config-swissquote/src/utils/resolveNestedSelector.js
index <HASH>..<HASH> 100644
--- a/packages/stylelint-config-swissquote/src/utils/resolveNestedSelector.js
+++ b/packages/stylelint-config-swissquote/src/utils/resolveNestedSelector.js
@@ -8,7 +8,17 @@ function hasParentSelector(selector) {
   return selector.some(part => part.selector.indexOf("&") !== -1);
 }

-function insertParent(parentSelector, selector) {
+function insertParent(initialParentSelector, initialSelector) {
+  // As we are mutating the entries, we clone them here to not mess up the underlying data
+  const parentSelector = initialParentSelector.map(entry => ({
+    selector: entry.selector,
+    node: entry.node.clone()
+  }));
+  const selector = initialSelector.map(entry => ({
+    selector: entry.selector,
+    node: entry.node.clone()
+  }));
+
   for (const index in selector) {
     if (!hasOwnProperty.call(selector, index)) {
       continue;
@@ -62,8 +72,7 @@ function insertParent(parentSelector, selector) {
 }

 function resolveNestedSelector(initialSelector, node) {
-  var parent = node.parent;
-  var parentIsNestAtRule = parent.type === "atrule" && parent.name === "nest";
+  const parent = node.parent;

   const selector = Array.isArray(initialSelector)
     ? initialSelector
@@ -72,17 +81,19 @@ function resolveNestedSelector(initialSelector, node) {
   if (parent.type === "root") {
     return [selector];
   }
+
+  const parentIsNestAtRule = parent.type === "atrule" && parent.name === "nest";
   if (parent.type !== "rule" && !parentIsNestAtRule) {
     return resolveNestedSelector(selector, parent);
   }

-  var parentSelectors = parentIsNestAtRule
+  const parentSelectors = parentIsNestAtRule
     ? parent.params.split(",").map(s => s.trim())
     : parent.selectors;

   return parentSelectors.reduce((result, parentSelector) => {
     if (hasParentSelector(selector)) {
-      var newlyResolvedSelectors = resolveNestedSelector(
+      const newlyResolvedSelectors = resolveNestedSelector(
         parentSelector,
         parent
       ).map(resolvedParentSelector =>
@@ -92,9 +103,9 @@ function resolveNestedSelector(initialSelector, node) {
       return result.concat(newlyResolvedSelectors);
     }

-    var combinedSelector = [{ selector: parentSelector, node: parent }].concat(
-      selector
-    );
+    const combinedSelector = [
+      { selector: parentSelector, node: parent }
+    ].concat(selector);
     return result.concat(resolveNestedSelector(combinedSelector, parent));
   }, []);
 }
Fix linting with multiple nested selectors
swissquote_crafty
train
b505fd7d4bc411da5279bd5e8809293c8e9bcad4
diff --git a/awkward/array/chunked.py b/awkward/array/chunked.py index <HASH>..<HASH> 100644 --- a/awkward/array/chunked.py +++ b/awkward/array/chunked.py @@ -99,7 +99,7 @@ class ChunkedArray(awkward.array.base.AwkwardArray): except TypeError: raise TypeError("chunks must be iterable") - self._chunks = [awkward.util.toarray(x, awkward.util.CHARTYPE, (awkward.util.numpy.ndarray, awkward.array.base.AwkwardArray)) for x in value] + self._chunks = [awkward.util.toarray(x, awkward.util.DEFAULTTYPE, (awkward.util.numpy.ndarray, awkward.array.base.AwkwardArray)) for x in value] self._types = [None] * len(self._chunks) @property @@ -340,16 +340,19 @@ class ChunkedArray(awkward.array.base.AwkwardArray): # stop can be len(self) if step > 0 # stop can be -1 if step < 0 (not a Python_negative_indices, but an indicator to go all the way to 0) - try: - start_chunkid = self.global2chunkid(start) - except IndexError: - # case A or B start was set beyond len(self), clamp it - if step > 0: - start = len(self) - start_chunkid = len(self._chunks) - else: - start = len(self) - 1 - start_chunkid = len(self._chunks) - 1 + if start == -1: + # case C start below 0 + start_chunkid = -1 + else: + try: + start_chunkid = self.global2chunkid(start) + except IndexError: + if start > 0: + # case A or B start was set beyond len(self), clamp it + start, start_chunkid = len(self), len(self._chunks) + if step < 0: + start -= 1 + start_chunkid -= 1 if stop == -1: # case B or C stop not set with step < 0; go all the way to 0 @@ -360,7 +363,12 @@ class ChunkedArray(awkward.array.base.AwkwardArray): except IndexError: # stop is at or beyond len(self), clamp it stop = len(self) - stop_chunkid = len(self._chunks) + if step > 0: + # we want the chunkid at or to the right of stop (no -1) + stop_chunkid = min(awkward.util.numpy.searchsorted(self.offsets, stop, "right"), len(self._chunks)) + else: + # we want the chunkid to the left of stop + stop_chunkid = max(awkward.util.numpy.searchsorted(self.offsets, stop, "right") - 2, -1) offsets = self.offsets chunks = [] @@ -374,7 +382,7 @@ class ChunkedArray(awkward.array.base.AwkwardArray): else: local_start = self._counts[chunkid] - skip - if chunkid == stop_chunkid: + if chunkid == stop_chunkid - (1 if step > 0 else -1): if stop == -1: local_stop = None else: @@ -383,13 +391,20 @@ class ChunkedArray(awkward.array.base.AwkwardArray): local_stop = None slc = slice(local_start, local_stop, step) - chunks.append(self._chunks[chunkid][slc]) local_start, local_stop, _ = slc.indices(self._counts[chunkid]) - if step > 0: - skip = (local_stop - local_start) % step - else: - skip = -(local_stop - local_start) % -step + if local_stop != local_start: + if step > 0: + skip = (local_stop - local_start) % step + else: + skip = -(local_stop - local_start) % -step + + chunk = self._chunks[chunkid][(slc,) + tail] + if len(chunk) > 0: + chunks.append(chunk) + + if len(chunks) == 0 and len(self._chunks) > 0: + chunks.append(self._chunks[0][(slice(0, 0),) + tail]) # so sliced.type == self.type return self.__class__(chunks)
[skip ci] __getitem__ slice seems to be working
scikit-hep_awkward-array
train
164ba50f65d1392d04c39b38b3a1e376c1cde922
diff --git a/djcelery/backends/cache.py b/djcelery/backends/cache.py index <HASH>..<HASH> 100644 --- a/djcelery/backends/cache.py +++ b/djcelery/backends/cache.py @@ -5,14 +5,14 @@ from datetime import timedelta import django from django.utils.encoding import smart_str -from django.core.cache import cache, get_cache +from django.core.cache import cache, caches from celery import current_app from celery.backends.base import KeyValueStoreBackend # CELERY_CACHE_BACKEND overrides the django-global(tm) backend settings. if current_app.conf.CELERY_CACHE_BACKEND: - cache = get_cache(current_app.conf.CELERY_CACHE_BACKEND) # noqa + cache = caches[current_app.conf.CELERY_CACHE_BACKEND] # noqa class DjangoMemcacheWrapper(object): diff --git a/djcelery/tests/test_backends/test_cache.py b/djcelery/tests/test_backends/test_cache.py index <HASH>..<HASH> 100644 --- a/djcelery/tests/test_backends/test_cache.py +++ b/djcelery/tests/test_backends/test_cache.py @@ -104,12 +104,7 @@ class test_custom_CacheBackend(unittest.TestCase): prev_backend = current_app.conf.CELERY_CACHE_BACKEND prev_module = sys.modules['djcelery.backends.cache'] - if django.VERSION >= (1, 3): - current_app.conf.CELERY_CACHE_BACKEND = \ - 'django.core.cache.backends.dummy.DummyCache' - else: - # Django 1.2 used 'scheme://' style cache backends - current_app.conf.CELERY_CACHE_BACKEND = 'dummy://' + current_app.conf.CELERY_CACHE_BACKEND = 'dummy' sys.modules.pop('djcelery.backends.cache') try: from djcelery.backends.cache import cache diff --git a/tests/settings.py b/tests/settings.py index <HASH>..<HASH> 100644 --- a/tests/settings.py +++ b/tests/settings.py @@ -75,6 +75,15 @@ DATABASES = { }, } +CACHES = { + 'default': { + 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', + }, + 'dummy': { + 'BACKEND': 'django.core.cache.backends.dummy.DummyCache', + }, +} + INSTALLED_APPS = ( 'django.contrib.auth', 'django.contrib.contenttypes',
Fixed caches usage on Django <I>+ (#<I>)
celery_django-celery
train
70df0c20a0feeccebaf63f73cdd128c13ec5823b
diff --git a/integration-tests/src/test/java/tachyon/master/JournalIntegrationTest.java b/integration-tests/src/test/java/tachyon/master/JournalIntegrationTest.java index <HASH>..<HASH> 100644 --- a/integration-tests/src/test/java/tachyon/master/JournalIntegrationTest.java +++ b/integration-tests/src/test/java/tachyon/master/JournalIntegrationTest.java @@ -618,7 +618,7 @@ public class JournalIntegrationTest { PlainSaslServer.AuthorizedClientUser.set("user1"); FileInfo info = fsMaster.getFileInfo(new TachyonURI("/file")); - Assert.assertEquals(status, info); + Assert.assertEquals(status, new URIStatus(info)); fsMaster.stop(); } diff --git a/servers/src/main/java/tachyon/master/file/FileSystemMaster.java b/servers/src/main/java/tachyon/master/file/FileSystemMaster.java index <HASH>..<HASH> 100644 --- a/servers/src/main/java/tachyon/master/file/FileSystemMaster.java +++ b/servers/src/main/java/tachyon/master/file/FileSystemMaster.java @@ -1616,11 +1616,11 @@ public final class FileSystemMaster extends MasterBase { * @throws AccessControlException if permission checking fails */ public void setState(TachyonURI path, SetAttributeOptions options) - throws FileDoesNotExistException, AccessControlException { + throws FileDoesNotExistException, AccessControlException, InvalidPathException { MasterContext.getMasterSource().incSetStateOps(1); synchronized (mInodeTree) { checkPermission(FileSystemAction.WRITE, path, false); - long fileId = getFileId(path); + long fileId = mInodeTree.getInodeByPath(path).getId(); long opTimeMs = System.currentTimeMillis(); setStateInternal(fileId, opTimeMs, options); SetStateEntry.Builder setState = diff --git a/servers/src/test/java/tachyon/master/journal/JournalFormatterTestBase.java b/servers/src/test/java/tachyon/master/journal/JournalFormatterTestBase.java index <HASH>..<HASH> 100644 --- a/servers/src/test/java/tachyon/master/journal/JournalFormatterTestBase.java +++ b/servers/src/test/java/tachyon/master/journal/JournalFormatterTestBase.java @@ -235,14 +235,6 @@ public abstract class JournalFormatterTestBase { .build()) .add( JournalEntry.newBuilder() - .setCompletePartition(CompletePartitionEntry.newBuilder() - .setStoreId(TEST_FILE_ID) - .setBlockId(TEST_BLOCK_ID) - .setKeyLimit(TEST_KEY1) - .setKeyStart(TEST_KEY2)) - .build()) - .add( - JournalEntry.newBuilder() .setSetAcl(File.SetAclEntry.newBuilder() .setId(TEST_FILE_ID) .setOpTimeMs(TEST_OP_TIME_MS)
Update FileSystemMaster to use unprotected calls if necessary.
Alluxio_alluxio
train
9456b763be999f83730640c5385061a91363fb53
diff --git a/test/setup-fixtures.js b/test/setup-fixtures.js index <HASH>..<HASH> 100644 --- a/test/setup-fixtures.js +++ b/test/setup-fixtures.js @@ -61,6 +61,7 @@ var dropDB = function(cb){ function dropCollection(collection, done){ console.log("Dropping ", collection) mongoose.connection.collections[collection].drop(function(err){ + if (err && err.errmsg === 'ns not found') return done(null, collection) done(err, collection) }) } @@ -95,7 +96,7 @@ var connect = function(cb) { mongoose.connection.on('connected', function (err, res) { if(err) throw err - console.log("<--- connected", err, res) + console.log("<--- connected") cb.apply(this, arguments) }); }
don't bail if collection doesn't exist when we drop it
Strider-CD_strider
train
50937c8ed2358a8766e7cf6f61b6bc3fe725c3ac
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -13,7 +13,7 @@ v = open(os.path.join(os.path.dirname(__file__), 'dyndnsc', '__init__.py')) VERSION = re.compile(r".*__version__ = '(.*?)'", re.S).match(v.read()).group(1) v.close() -CLASSIFIERS = [ +CLASSIFIERS = ( 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'License :: DFSG approved', @@ -32,10 +32,31 @@ CLASSIFIERS = [ 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4' -] +) -INSTALL_REQUIRES = ['requests', 'netifaces==0.10.3'] -TESTS_REQUIRE = ['bottle==0.12.7'] + +def patch_test_requires(requires): + '''python version compatibility''' + if sys.version_info < (3, 3): + return requires + ["mock"] + else: + return requires + + +def patch_install_requires(requires): + '''python version compatibility''' + to_add = [] + if sys.version_info < (3, 3): + to_add.append("IPy>=0.56") + if sys.version_info < (3, 2): + to_add.append("argparse") + if sys.version_info < (3, 0): + to_add.append("netifaces==0.10.3") + else: + to_add.append("netifaces-py3==0.8") + if sys.version_info < (2, 7): # continue support for python 2.6 + to_add.append("importlib") + return requires + to_add if sys.version_info >= (3, 0): pass @@ -44,14 +65,6 @@ else: # affects only python2 when using multiprocessing and if nose is installed import multiprocessing -if sys.version_info < (3, 3): - TESTS_REQUIRE.append("mock") - INSTALL_REQUIRES.append("IPy>=0.56") -if sys.version_info < (3, 2): - INSTALL_REQUIRES.append("argparse") -if sys.version_info < (2, 7): # continue support for python 2.6 - INSTALL_REQUIRES.append("importlib") - setup(name='dyndnsc', packages=[ 'dyndnsc', @@ -70,12 +83,12 @@ setup(name='dyndnsc', long_description=(open('README.rst', 'r').read() + '\n\n' + open('CHANGELOG.rst', 'r').read()), url='https://github.com/infothrill/python-dyndnsc', - install_requires=INSTALL_REQUIRES, + install_requires=patch_install_requires(['requests']), entry_points=(""" [console_scripts] dyndnsc=dyndnsc.cli:main """), classifiers=CLASSIFIERS, test_suite='dyndnsc.tests', - tests_require=TESTS_REQUIRE, + tests_require=patch_test_requires(['bottle==0.12.7']) )
netifaces <I> still seems to break our build on travis for some reason. go back to netifaces-py3 for the time being.
infothrill_python-dyndnsc
train
902b15698809cab28c24bf7f834b6f6b2796705c
diff --git a/src/test/java/org/junit/tests/experimental/theories/AllMembersSupplierTest.java b/src/test/java/org/junit/tests/experimental/theories/AllMembersSupplierTest.java index <HASH>..<HASH> 100644 --- a/src/test/java/org/junit/tests/experimental/theories/AllMembersSupplierTest.java +++ b/src/test/java/org/junit/tests/experimental/theories/AllMembersSupplierTest.java @@ -53,7 +53,9 @@ public class AllMembersSupplierTest { public static class HasDataPointsMethodWithNullValue { @DataPoints - public static Integer[] objects = {null, 1}; + public static Integer[] getObjects() { + return new Integer[] {null, 1}; + } public HasDataPointsMethodWithNullValue(Integer i) { }
Issue #<I>: fix test case. (Forgot to modify the class after copying, yielding essentially identical tests.)
junit-team_junit4
train
afc223436cc7ce861d786f6b9965a367ccd9daa0
diff --git a/server/webapp/WEB-INF/rails/app/helpers/application_helper.rb b/server/webapp/WEB-INF/rails/app/helpers/application_helper.rb
index <HASH>..<HASH> 100644
--- a/server/webapp/WEB-INF/rails/app/helpers/application_helper.rb
+++ b/server/webapp/WEB-INF/rails/app/helpers/application_helper.rb
@@ -518,7 +518,8 @@ module ApplicationHelper
   end

   def supports_vsm_analytics?
-    return false unless is_user_an_admin?
+    return false if show_analytics_only_for_admins? && !is_user_an_admin?
+
     !default_plugin_info_finder.allPluginInfos(PluginConstants.ANALYTICS_EXTENSION).detect do |combined_plugin_info|
       combined_plugin_info.extensionFor(PluginConstants.ANALYTICS_EXTENSION).getCapabilities().supportsVSMAnalytics()
     end.nil?
diff --git a/server/webapp/WEB-INF/rails/spec/helpers/application_helper_spec.rb b/server/webapp/WEB-INF/rails/spec/helpers/application_helper_spec.rb
index <HASH>..<HASH> 100644
--- a/server/webapp/WEB-INF/rails/spec/helpers/application_helper_spec.rb
+++ b/server/webapp/WEB-INF/rails/spec/helpers/application_helper_spec.rb
@@ -644,10 +644,20 @@ describe ApplicationHelper do
       expect(supports_vsm_analytics?).to eq(true)
     end

-    it "should not support vsm analytics for non-admins" do
+    it "should support vsm analytics for all users" do
       def default_plugin_info_finder; @default_plugin_info_finder; end
       def is_user_an_admin?; false; end

+      allow(@default_plugin_info_finder).to receive('allPluginInfos').with(PluginConstants.ANALYTICS_EXTENSION).and_return([@plugin_info1, @plugin_info2, @plugin_info3, @plugin_info4])
+
+      expect(supports_vsm_analytics?).to eq(true)
+    end
+
+    it "should support vsm analytics only to admins" do
+      def default_plugin_info_finder; @default_plugin_info_finder; end
+      def is_user_an_admin?; false; end
+      def show_analytics_only_for_admins?; true; end
+
       expect(supports_vsm_analytics?).to eq(false)
     end

diff --git a/spark/spark-spa/src/main/java/com/thoughtworks/go/spark/spa/AnalyticsDelegate.java b/spark/spark-spa/src/main/java/com/thoughtworks/go/spark/spa/AnalyticsDelegate.java
index <HASH>..<HASH> 100644
--- a/spark/spark-spa/src/main/java/com/thoughtworks/go/spark/spa/AnalyticsDelegate.java
+++ b/spark/spark-spa/src/main/java/com/thoughtworks/go/spark/spa/AnalyticsDelegate.java
@@ -118,18 +118,15 @@ public class AnalyticsDelegate implements SparkController {
     }

     private void checkPermissions(Request request, Response response) {
+        if (isAnalyticsEnabledOnlyForAdmins()) {
+            authenticationHelper.checkAdminUserAnd403(request, response);
+            return;
+        }
+
         if (isPipelineRequest(request)) {
-            if (adminsOnly()) {
-                authenticationHelper.checkAdminUserAnd403(request, response);
-            } else {
-                authenticationHelper.checkPipelineViewPermissionsAnd403(request, response);
-            }
-        } else if (isAgentRequest(request)) {
-            if (adminsOnly()) {
-                authenticationHelper.checkAdminUserAnd403(request, response);
-            } else {
-                authenticationHelper.checkUserAnd403(request, response);
-            }
+            authenticationHelper.checkPipelineViewPermissionsAnd403(request, response);
+        } else if (isAgentRequest(request) || isVSMRequest(request)) {
+            authenticationHelper.checkUserAnd403(request, response);
         } else {
             authenticationHelper.checkAdminUserAnd403(request, response);
         }
@@ -143,7 +140,11 @@ public class AnalyticsDelegate implements SparkController {
         return "agent".equals(request.params(":type"));
     }

-    private boolean adminsOnly() {
+    private boolean isVSMRequest(Request request) {
+        return "vsm".equalsIgnoreCase(request.params(":type"));
+    }
+
+    private boolean isAnalyticsEnabledOnlyForAdmins() {
         return systemEnvironment.enableAnalyticsOnlyForAdmins();
     }
diff --git a/spark/spark-spa/src/test/groovy/com/thoughtworks/go/spark/spa/AnalyticsDelegateTest.groovy b/spark/spark-spa/src/test/groovy/com/thoughtworks/go/spark/spa/AnalyticsDelegateTest.groovy
index <HASH>..<HASH> 100644
--- a/spark/spark-spa/src/test/groovy/com/thoughtworks/go/spark/spa/AnalyticsDelegateTest.groovy
+++ b/spark/spark-spa/src/test/groovy/com/thoughtworks/go/spark/spa/AnalyticsDelegateTest.groovy
@@ -190,6 +190,17 @@ class AnalyticsDelegateTest implements ControllerTrait<AnalyticsDelegate>, Secur
     }

     @Test
+    void "should allow all users to view VSM analytics"() {
+      when(systemEnvironment.enableAnalyticsOnlyForAdmins()).thenReturn(false)
+
+      enableSecurity()
+      loginAsUser()
+
+      get(controller.controllerPath("plugin/vsm/metric"))
+      assertRequestAllowed()
+    }
+
+    @Test
     void "should return 404 when pipeline does not exist"() {
       when(pipelineConfigService.pipelineConfigNamed(getPipelineName())).thenReturn(null)
       enableSecurity()
Allow non-admins to view vsm analytics
gocd_gocd
train
e311f784021970392716c8004e858625839019b0
diff --git a/lib/ffprobe.js b/lib/ffprobe.js index <HASH>..<HASH> 100644 --- a/lib/ffprobe.js +++ b/lib/ffprobe.js @@ -105,6 +105,8 @@ module.exports = function(proto) { return callback(new Error('Cannot run ffprobe on stream input')); } + var args = ['-show_streams', '-show_format'].concat(this._currentOutput.options.get(), input.source); + // Find ffprobe this._getFfprobePath(function(err, path) { if (err) { @@ -119,11 +121,7 @@ module.exports = function(proto) { var stderrClosed = false; // Spawn ffprobe - var ffprobe = spawn(path, [ - '-show_streams', - '-show_format', - input.source - ]); + var ffprobe = spawn(path, args); ffprobe.on('error', function(err) { callback(err); @@ -212,4 +210,3 @@ module.exports = function(proto) { }); }; }; -
Allow passing additional output options for ffprobe
fluent-ffmpeg_node-fluent-ffmpeg
train
a1968f88ead3bd91fa2c2e2146c2b208318bda2f
diff --git a/src/Canvas/Rasterization/EdgeTable.php b/src/Canvas/Rasterization/EdgeTable.php index <HASH>..<HASH> 100644 --- a/src/Canvas/Rasterization/EdgeTable.php +++ b/src/Canvas/Rasterization/EdgeTable.php @@ -58,7 +58,7 @@ class EdgeTable */ public function getScanline($y) { - return @$this->scanlines[$y]; + return isset($this->scanlines[$y]) ? $this->scanlines[$y] : null; } /**
Removed @ to prevent php warnings (#5).
dmester_jdenticon-php
train
145fdcce3a5e30932724ea3e43c65d924cd048f0
diff --git a/generated/google/apis/accessapproval_v1.rb b/generated/google/apis/accessapproval_v1.rb
index <HASH>..<HASH> 100644
--- a/generated/google/apis/accessapproval_v1.rb
+++ b/generated/google/apis/accessapproval_v1.rb
@@ -25,7 +25,7 @@ module Google
     # @see https://cloud.google.com/access-approval/docs
     module AccessapprovalV1
       VERSION = 'V1'
-      REVISION = '20200930'
+      REVISION = '20201014'

       # View and manage your data across Google Cloud Platform services
       AUTH_CLOUD_PLATFORM = 'https://www.googleapis.com/auth/cloud-platform'
diff --git a/generated/google/apis/accessapproval_v1/classes.rb b/generated/google/apis/accessapproval_v1/classes.rb
index <HASH>..<HASH> 100644
--- a/generated/google/apis/accessapproval_v1/classes.rb
+++ b/generated/google/apis/accessapproval_v1/classes.rb
@@ -317,21 +317,19 @@ module Google
         include Google::Apis::Core::Hashable

         # The product for which Access Approval will be enrolled. Allowed values are
-        # listed below (case-sensitive): * all * GA * App Engine * BigQuery * Cloud
-        # Bigtable * Cloud Key Management Service * Compute Engine * Cloud Dataflow *
-        # Cloud Identity and Access Management * Cloud Pub/Sub * Cloud Storage *
-        # Persistent Disk Note: These values are supported as input for legacy purposes,
-        # but will not be returned from the API. * all * ga-only * appengine.googleapis.
-        # com * bigquery.googleapis.com * bigtable.googleapis.com * cloudkms.googleapis.
-        # com * compute.googleapis.com * dataflow.googleapis.com * iam.googleapis.com *
-        # pubsub.googleapis.com * storage.googleapis.com Calls to
-        # UpdateAccessApprovalSettings using 'all', 'ga-only', or any of the XXX.
-        # googleapis.com will be translated to the associated product name ('all', 'GA',
-        # 'App Engine', etc.). Note: 'all' will enroll the resource in all products
-        # supported at both 'GA' and 'Preview' levels. 'ga-only'/'GA' will only enroll
-        # the resource in products supported at 'GA' level. More information about
-        # levels of support is available at https://cloud.google.com/access-approval/
-        # docs/supported-services
+        # listed below (case-sensitive): * all * App Engine * BigQuery * Cloud Bigtable *
+        # Cloud Key Management Service * Compute Engine * Cloud Dataflow * Cloud
+        # Identity and Access Management * Cloud Pub/Sub * Cloud Storage * Persistent
+        # Disk Note: These values are supported as input for legacy purposes, but will
+        # not be returned from the API. * all * appengine.googleapis.com * bigquery.
+        # googleapis.com * bigtable.googleapis.com * cloudkms.googleapis.com * compute.
+        # googleapis.com * dataflow.googleapis.com * iam.googleapis.com * pubsub.
+        # googleapis.com * storage.googleapis.com Calls to UpdateAccessApprovalSettings
+        # using 'all' or any of the XXX.googleapis.com will be translated to the
+        # associated product name ('all', 'App Engine', etc.). Note: 'all' will enroll
+        # the resource in all products supported at both 'GA' and 'Preview' levels. More
+        # information about levels of support is available at https://cloud.google.com/
+        # access-approval/docs/supported-services
         # Corresponds to the JSON property `cloudProduct`
         # @return [String]
         attr_accessor :cloud_product
diff --git a/generated/google/apis/accessapproval_v1/synth.metadata b/generated/google/apis/accessapproval_v1/synth.metadata
index <HASH>..<HASH> 100644
--- a/generated/google/apis/accessapproval_v1/synth.metadata
+++ b/generated/google/apis/accessapproval_v1/synth.metadata
@@ -4,7 +4,7 @@
       "git": {
         "name": ".",
         "remote": "https://github.com/googleapis/google-api-ruby-client.git",
-        "sha": "c98c719bbab68d0890524d53f8b629d7858af9c2"
+        "sha": "cb0c5bf94e2b1c915107eec83041d4409c900155"
       }
     }
   ]
feat: Automated regeneration of accessapproval v1 client
googleapis_google-api-ruby-client
train
bd4e6a37e89214c7c8d5eb98fdf3d0c3329593d1
diff --git a/lib/Url.php b/lib/Url.php index <HASH>..<HASH> 100644 --- a/lib/Url.php +++ b/lib/Url.php @@ -56,7 +56,7 @@ class Url !isset($this->_pass) && !isset($this->_host) && !isset($this->_port) && - !isset($this->_path)) { + (!isset($this->_path) || $this->_path->name()[0] != '/')) { $string = ltrim($string, '/'); } else if (!isset($this->_path) && !isset($this->_fragment) &&
Fix relative URLs in Coast\Url
jacksleight_coast
train
225e7c75b56cfbdd69d1420a2b42ecee0eefc8e4
diff --git a/test/e2e/common/configmap.go b/test/e2e/common/configmap.go
index <HASH>..<HASH> 100644
--- a/test/e2e/common/configmap.go
+++ b/test/e2e/common/configmap.go
@@ -177,6 +177,12 @@ var _ = ginkgo.Describe("[sig-node] ConfigMap", func() {
 		_, err := f.ClientSet.CoreV1().ConfigMaps(testNamespaceName).Create(context.TODO(), &testConfigMap, metav1.CreateOptions{})
 		framework.ExpectNoError(err, "failed to create ConfigMap")

+		ginkgo.By("fetching the ConfigMap")
+		configMap, err := f.ClientSet.CoreV1().ConfigMaps(testNamespaceName).Get(context.TODO(), testConfigMapName, metav1.GetOptions{})
+		framework.ExpectNoError(err, "failed to get ConfigMap")
+		framework.ExpectEqual(configMap.Data["valueName"], testConfigMap.Data["valueName"])
+		framework.ExpectEqual(configMap.Labels["test-configmap-static"], testConfigMap.Labels["test-configmap-static"])
+
 		configMapPatchPayload, err := json.Marshal(v1.ConfigMap{
 			ObjectMeta: metav1.ObjectMeta{
 				Labels: map[string]string{
@@ -193,39 +199,35 @@ var _ = ginkgo.Describe("[sig-node] ConfigMap", func() {
 		_, err = f.ClientSet.CoreV1().ConfigMaps(testNamespaceName).Patch(context.TODO(), testConfigMapName, types.StrategicMergePatchType, []byte(configMapPatchPayload), metav1.PatchOptions{})
 		framework.ExpectNoError(err, "failed to patch ConfigMap")

-		ginkgo.By("fetching the ConfigMap")
-		configMap, err := f.ClientSet.CoreV1().ConfigMaps(testNamespaceName).Get(context.TODO(), testConfigMapName, metav1.GetOptions{})
-		framework.ExpectNoError(err, "failed to get ConfigMap")
-		framework.ExpectEqual(configMap.Data["valueName"], "value1", "failed to patch ConfigMap")
-		framework.ExpectEqual(configMap.Labels["test-configmap"], "patched", "failed to patch ConfigMap")
-
-		ginkgo.By("listing all ConfigMaps in all namespaces")
+		ginkgo.By("listing all ConfigMaps in all namespaces with a label selector")
 		configMapList, err := f.ClientSet.CoreV1().ConfigMaps("").List(context.TODO(), metav1.ListOptions{
-			LabelSelector: "test-configmap-static=true",
+			LabelSelector: "test-configmap=patched",
 		})
 		framework.ExpectNoError(err, "failed to list ConfigMaps with LabelSelector")
-		framework.ExpectNotEqual(len(configMapList.Items), 0, "no ConfigMaps found in ConfigMap list")
 		testConfigMapFound := false
 		for _, cm := range configMapList.Items {
-			if cm.ObjectMeta.Name == testConfigMapName &&
+			if cm.ObjectMeta.Name == testConfigMap.ObjectMeta.Name &&
 				cm.ObjectMeta.Namespace == testNamespaceName &&
-				cm.ObjectMeta.Labels["test-configmap-static"] == "true" &&
+				cm.ObjectMeta.Labels["test-configmap-static"] == testConfigMap.ObjectMeta.Labels["test-configmap-static"] &&
+				cm.ObjectMeta.Labels["test-configmap"] == "patched" &&
 				cm.Data["valueName"] == "value1" {
 				testConfigMapFound = true
 				break
 			}
 		}
-		framework.ExpectEqual(testConfigMapFound, true, "failed to find ConfigMap in list")
+		framework.ExpectEqual(testConfigMapFound, true, "failed to find ConfigMap by label selector")

-		ginkgo.By("deleting the ConfigMap by a collection")
+		ginkgo.By("deleting the ConfigMap by collection with a label selector")
 		err = f.ClientSet.CoreV1().ConfigMaps(testNamespaceName).DeleteCollection(context.TODO(), metav1.DeleteOptions{}, metav1.ListOptions{
 			LabelSelector: "test-configmap-static=true",
 		})
 		framework.ExpectNoError(err, "failed to delete ConfigMap collection with LabelSelector")
-		ginkgo.By("listing all ConfigMaps in all namespaces")
-		configMapList, err = f.ClientSet.CoreV1().ConfigMaps("").List(context.TODO(), metav1.ListOptions{
+
+		ginkgo.By("listing all ConfigMaps in test namespace")
+		configMapList, err = f.ClientSet.CoreV1().ConfigMaps(testNamespaceName).List(context.TODO(), metav1.ListOptions{
 			LabelSelector: "test-configmap-static=true",
 		})
+		framework.ExpectNoError(err, "failed to list ConfigMap by LabelSelector")
 		framework.ExpectEqual(len(configMapList.Items), 0, "ConfigMap is still present after being deleted by collection")
 	})
 })
Reorder checks slightly, add a list-by-namespace call

Now the test covers 6 different api calls
- verify create with a get
- verify patch with a list (all namespaces)
- verify delete with a list (single namespace)
kubernetes_kubernetes
train
2381035a81e7f56943e0c3d6c7d319dea5bb40f5
diff --git a/.travis.yml b/.travis.yml
index <HASH>..<HASH> 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,6 +1,13 @@
 # https://travis-ci.org/anuragkumarak95/wordnet

+# for faster access, use sudo: false
+sudo: false
+
 language: python
+
 python:
+  - 2.6
+  - 2.7
+  - 3.2
   - 3.3
   - 3.4
   - 3.5
diff --git a/README.md b/README.md
index <HASH>..<HASH> 100644
--- a/README.md
+++ b/README.md
@@ -93,7 +93,7 @@ Three major parts are in this project.

 To run a formal test, simply run this script. `python test.py`, this module will return **0** if everythinig worked as expected.

-test.py uses sample data provided [here](./test/testdata) and executes unittest on `find_tf_idf()`, `find_knn()` & `generate_net()`.
+test.py uses sample data provided [here](https://github.com/anuragkumarak95/wordnet/tree/master/test) and executes unittest on `find_tf_idf()`, `find_knn()` & `generate_net()`.

 > `Streamer` functionality will not be provided under distribution of this code. That is just a script independent from the module.
diff --git a/test.py b/test.py
index <HASH>..<HASH> 100644
--- a/test.py
+++ b/test.py
@@ -39,7 +39,7 @@ def test_nnwords(word):

 # unittest class for Testing.
 class TestWordNet(unittest.TestCase):
-    def test_word(self):
+    def test_Word_module(self):
         w1 = Word('test',set(['case']),set(['#1']))
         w2 = Word('test_new')
         w2.setw('test')
@@ -67,36 +67,27 @@ class TestWordNet(unittest.TestCase):
         wrng_name = 'test/test.wrng'
         with self.assertRaises(Exception): generate_net(idf,tf_idf,wrng_name)
         with self.assertRaises(Exception): retrieve_net(wrng_name)
-        # WordNet Generate Net module walkthrough
-        wrnt_name = 'test/test.wrnt'
-        n = generate_net(idf,tf_idf)

-        # fetching test case wrnt file.
-        with open(wrnt_name,'rb') as f:
-            a_n = pickle.load(f)
+        #test file
+        wrnt_name = 'test/test.wrnt'

+        # WordNet Reatrieve Net module walkthrough
+        word_net_ret = retrieve_net(wrnt_name)

+        # WordNet Generate Net module walkthrough
+        word_net_gen = generate_net(idf,tf_idf)
+
+        # assertion or generated and retrived networks equality.
+        for word in word_net_gen:
+            self.assertEquals(word_net_gen[word].w, word_net_ret[word].w)
+            self.assertEquals(word_net_gen[word].frwrd_links, word_net_ret[word].frwrd_links)

-        # WordNet Reatrieval module walkthrough
-        word_net = retrieve_net(wrnt_name)
-        r_n=[]
-        for word in word_net:
-            __temp = [word_net[word].w]
-            __temp.extend(word_net[word].frwrd_links)
-            r_n.append(__temp)
-            del __temp
-
-        # only asserting length right now, as these process are not creating same sequence or words. check needed.
-        self.assertEquals(len(a_n),len(n))
-        self.assertEquals(len(a_n),len(r_n))
-
         # covering dump section of generate_net()
         generate_net(idf,tf_idf,'test/dump.wrnt')

         # cleaning as we move on!
         os.remove('test/dump.wrnt')

-        del a_n
-        del r_n
-        del n
-
+        del word_net_gen
+        del word_net_ret
+
 if __name__=="__main__":
     __init__()
     unittest.main()
diff --git a/wordnet/tf_idf_generator.py b/wordnet/tf_idf_generator.py
index <HASH>..<HASH> 100644
--- a/wordnet/tf_idf_generator.py
+++ b/wordnet/tf_idf_generator.py
@@ -83,7 +83,7 @@ def find_tf_idf(file_names=['./../test/testdata'],prev_file_path=None, dump_path
     for doc in tf_idf:
         for key in doc:
             true_idf = math.log(len(tf_idf)/idf[key])
-            true_tf = doc[key]/len(doc)
+            true_tf = doc[key]/float(len(doc))
             doc[key] = true_tf * true_idf

     print(TAG,'Total number of unique words in corpus',len(idf),'( '+paint('++'+str(len(idf)-prev_doc_count),'g')+' )' if prev_file_path else '')
diff --git a/wordnet/word_net.py b/wordnet/word_net.py
index <HASH>..<HASH> 100644
--- a/wordnet/word_net.py
+++ b/wordnet/word_net.py
@@ -90,7 +90,7 @@ def retrieve_net(wrnt_path):
     # Generating Word() instance dictionary from retrieved network.
     word_net = {}
     for n in network:
-        word_net[n[0]] = Word(n[0],None,set(n[1:]))
+        word_net[n[0]] = Word(n[0],None,set([network[i][0] for i in n[1:]]))
     # deleting useless resources, for efficient memory usage.
     del network
     return word_net
\ No newline at end of file
updated tf_idf_generator.py, solved error issue from python<I>,<I>,<I>. updated .travis.yml, added older versions for python. updated word_net.py, now retrieve_net() is returning exactly what is generated by generate_net().
anuragkumarak95_wordnet
train
a57392e18bfab9786a6f7caaeef98b8d45392c3d
diff --git a/tools/set_wv_parameters.py b/tools/set_wv_parameters.py index <HASH>..<HASH> 100644 --- a/tools/set_wv_parameters.py +++ b/tools/set_wv_parameters.py @@ -89,11 +89,19 @@ def set_wv_parameters(filter_name, grism_name): elif grism_name == "K" and filter_name == "Ksp": wv_parameters['islitlet_min'] = 2 wv_parameters['islitlet_max'] = 54 - wv_parameters['nbrightlines'] = [0] - wv_parameters['poly_crval1_linear'] = None - wv_parameters['poly_cdelt1_linear'] = None - wv_parameters['crval1_enlarged'] = None # 19000.0000 # Angstroms - wv_parameters['cdelt1_enlarged'] = 1.7000 # Angstroms/pixel + wv_parameters['nbrightlines'] = [15] + wv_parameters['poly_crval1_linear'] = np.polynomial.Polynomial([ + 2.21095313e+04, + -1.08900414e+01, + 9.66474839e-04 + ]) + wv_parameters['poly_cdelt1_linear'] = np.polynomial.Polynomial([ + 1.72596244e+00, + 2.85573046e-05, + -1.30027272e-07 + ]) + wv_parameters['crval1_enlarged'] = 19100.0000 # Angstroms + wv_parameters['cdelt1_enlarged'] = 1.7300 # Angstroms/pixel wv_parameters['naxis1_enlarged'] = 3400 # pixels elif grism_name == "LR" and filter_name == "YJ": wv_parameters['islitlet_min'] = 4
Set relevant wavelength calibration coefficients for grism K + filter Ksp
guaix-ucm_pyemir
train
5056115749e503f664eb4f026cf622c1f8fb29d1
diff --git a/phpsec.class.php b/phpsec.class.php index <HASH>..<HASH> 100644 --- a/phpsec.class.php +++ b/phpsec.class.php @@ -421,87 +421,6 @@ class phpsec { } return $injected; } - - /** - * Create a captcha iamge and return filename. - * - * @return mixed - * Returns the filename to the image containing the captcha or false on failure. - */ - public static function captcha() { - /* First, make sure we have GD. */ - if(!function_exists('imagecreatetruecolor')) { - self::error('GD is required to create captchas'); - return false; - } - - self::captchaImgCreate('filename'); - } - - /** - * Create the captcha image, and save it as $filename. - * - * @param string $filename - * Filename to save the captcha as. - */ - private static function captchaImgCreate($filename) { - $width = 120; - $height = 30; - - $img = imagecreatetruecolor($width, $height); - - // Allocate some colors. - $bg[] = imagecolorallocate($img, 212, 219, 234); - $bg[] = imagecolorallocate($img, 212, 219, 234); - $bg[] = imagecolorallocate($img, 212, 219, 234); - $bg[] = imagecolorallocate($img, 212, 219, 234); - $bg[] = imagecolorallocate($img, 212, 219, 234); - $bg[] = imagecolorallocate($img, 162, 176, 205); - $bg[] = imagecolorallocate($img, 162, 176, 205); - $bg[] = imagecolorallocate($img, 179, 191, 217); - $bg[] = imagecolorallocate($img, 179, 191, 217); - $bg[] = imagecolorallocate($img, 255, 184, 47); - $border = imagecolorallocate($img, 0, 0, 0); - $line = imagecolorallocate($img, 255, 0, 0); - - /* Add border. */ - imagerectangle ($img,0 ,0, $width-1, $height-1, $border); - - /* Add some background noice to the image. Loops trough the image and - * randomly set background colors. */ - $numColors = sizeof($bg); - for($y = 1; $y < $height-1; $y++) { - for($x = 1; $x < $width-1; $x++) { - imagesetpixel($img, $x, $y, $bg[rand(0, $numColors-1)]); - } - } - - /* Add a line to the image just for the heck of it. */ - imageline($img, 10, rand(5, $height-5), $width-10, rand(5, $height-5), $line); - - /* Add the text to the image. You need to be a genius to come up with code like this. */ - $str = self::captchaWord(6); - for($i = 0; $i < strlen($str); $i++) { - $char = strtoupper(substr($str, $i, 1)); - imagestring($img, 5, 20+$i*14, rand(5,10), $char, $border); - } - /* Set the magic word in the cache. */ - phpsecCache::cacheSet('captcha', $str); - - /* Save the image in the public data dir. */ - imagepng($img, PHPSEC_PUBLICDATADIR.'/'.$filename.'.png'); - imagedestroy($img); - } - - /** - * Generate a random word to use in the captcha. - * - * @param int $len - * Length of the word. - */ - private static function captchaWord($len = 5) { - return substr(hash(PHPSEC_HASHTYPE, self::genUid()), 0, $len); - } } phpsec::init(); /* Since this is a staticly called library, we need to initialize it ourself as no * contruct funtion is called for us. */
Moved the captcha code to separate file. Fixes #2.
phpsec_phpSec
train
42038e891e1a8945f4237fab84a717421b46b740
diff --git a/CHANGELOG.md b/CHANGELOG.md index <HASH>..<HASH> 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,10 @@ CHANGELOG ========= +## 3.0.3 + +- Fix issue when a single-value Selectivity input is reset to null throught the React API. + ## 3.0.2 - Fix #161: React API: Value should be re-set when the items change. diff --git a/src/selectivity.js b/src/selectivity.js index <HASH>..<HASH> 100644 --- a/src/selectivity.js +++ b/src/selectivity.js @@ -187,6 +187,8 @@ extend(Selectivity.prototype, { var items = this.items; if (items) { return Selectivity.findNestedById(items, id); + } else if (id === null) { + return null; } else { return { id: id, text: '' + id }; } diff --git a/tests/unit/react/single.js b/tests/unit/react/single.js index <HASH>..<HASH> 100644 --- a/tests/unit/react/single.js +++ b/tests/unit/react/single.js @@ -31,6 +31,47 @@ TestUtil.createReactTest( ); TestUtil.createReactTest( + 'react/single: test clear by setting null from outside', + ['inputs/single', 'dropdown', 'templates'], + { + allowClear: true, + async: true, + onChange: _.noop, + value: 1, + query: function(queryOptions) { + queryOptions.callback({ + results: [ + { id: 1, text: 'Amsterdam' }, + { id: 2, text: 'Antwerp' }, + { id: 3, text: 'Athens' } + ] + }); + } + }, + function(SelectivityReact, test, ref, container, $) { + test.plan(4); + + test.equal(ref.getValue(), 1); + + ReactDOM.render( + React.createElement(SelectivityReact, { + allowClear: true, + onChange: _.noop, + value: null + }), + container, + function() { + test.equal(ref.getData(), null); + test.equal(ref.getValue(), null); + + test.equal($('.selectivity-dropdown').length, 0); + test.end(); + } + ); + } +); + +TestUtil.createReactTest( 'react/single: test initial data', ['inputs/single', 'templates'], {
Fix issue when a single-value Selectivity input is reset to null through the React API.
arendjr_selectivity
train
6d94ecfb72e5c8c401356eaa4de9581cf799b4b1
diff --git a/salt/modules/dnsmasq.py b/salt/modules/dnsmasq.py index <HASH>..<HASH> 100644 --- a/salt/modules/dnsmasq.py +++ b/salt/modules/dnsmasq.py @@ -47,7 +47,7 @@ def fullversion(): .. code-block:: bash - salt '*' dnsmasq.version + salt '*' dnsmasq.fullversion ''' cmd = 'dnsmasq -v' out = __salt__['cmd.run'](cmd).splitlines()
doc fix: correct CLI example for dnsmasq fullversion function (#<I>)
saltstack_salt
train
7a1feaa89701bf861ab31ebd8ffdc8d8d1474e29
diff --git a/src/js/ripple/transactionmanager.js b/src/js/ripple/transactionmanager.js index <HASH>..<HASH> 100644 --- a/src/js/ripple/transactionmanager.js +++ b/src/js/ripple/transactionmanager.js @@ -365,8 +365,6 @@ TransactionManager.prototype._request = function(tx) { return tx.emit('error', new RippleError('tejLocalSigningRequired', message)); } - tx.emit('presubmit'); - if (tx.finalized) { return; } @@ -557,9 +555,11 @@ TransactionManager.prototype._request = function(tx) { return; } + tx.emit('presubmit'); + submitRequest.timeout(self._submissionTimeout, requestTimeout); - tx.submissions = submitRequest.broadcast(); + tx.submissions = submitRequest.broadcast(); tx.attempts++; tx.emit('postsubmit'); }; diff --git a/test/transaction-test.js b/test/transaction-test.js index <HASH>..<HASH> 100644 --- a/test/transaction-test.js +++ b/test/transaction-test.js @@ -1560,33 +1560,34 @@ describe('Transaction', function() { transaction.submit(); }); - it.skip('Abort submission', function(done) { + it('Abort submission on presubmit', function(done) { var remote = new Remote(); - var transaction = new Transaction(remote).accountSet('r36xtKNKR43SeXnGn7kN4r4JdQzcrkqpWe'); - var account = remote.addAccount('r36xtKNKR43SeXnGn7kN4r4JdQzcrkqpWe'); + remote.setSecret('rJaT8TafQfYJqDm8aC5n3Yx5yWEL2Ery79', 'snPwFATthTkKnGjEW73q3TL4yci1Q'); - account._transactionManager._nextSequence = 1; + var server = new Server(remote, 'wss://s1.ripple.com:443'); + server._computeFee = function() { return '12'; }; + server._connected = true; - account._transactionManager._request = function(tx) { - setTimeout(function() { - tx.emit('success', { }); - }, 20); - }; + remote._servers.push(server); + remote._connected = true; + remote._ledger_current_index = 1; - transaction.complete = function() { - return this; - }; + var transaction = new Transaction(remote).accountSet('rJaT8TafQfYJqDm8aC5n3Yx5yWEL2Ery79'); + var account = remote.account('rJaT8TafQfYJqDm8aC5n3Yx5yWEL2Ery79'); - function submitCallback(err, res) { + account._transactionManager._nextSequence = 1; + + transaction.once('presubmit', function() { + transaction.abort(); + }); + + transaction.submit(function(err, res) { setImmediate(function() { assert(err); assert.strictEqual(err.result, 'tejAbort'); done(); }); - }; - - transaction.submit(submitCallback); - transaction.abort(); + }); }); });
Relocate presubmit emission to immediately before transaction submit
ChainSQL_chainsql-lib
train
3e354f037b78be63252460aca40e28f784ce6846
diff --git a/server/src/main/java/org/jboss/as/server/deployment/Phase.java b/server/src/main/java/org/jboss/as/server/deployment/Phase.java index <HASH>..<HASH> 100644 --- a/server/src/main/java/org/jboss/as/server/deployment/Phase.java +++ b/server/src/main/java/org/jboss/as/server/deployment/Phase.java @@ -525,6 +525,7 @@ public enum Phase { public static final int POST_MODULE_UNDERTOW_MODCLUSTER = 0x3400; public static final int POST_MODULE_TRANSACTIONS_EE_CONCURRENCY = 0x3500; public static final int POST_MODULE_EE_COMPONENT_SUSPEND = 0x3600; + public static final int POST_MODULE_PERMISSIONS_VALIDATION = 0x3700; // INSTALL public static final int INSTALL_SHARED_SESSION_MANAGER = 0x0100;
[WFLY-<I>] Add a phase for the permissions validation.
wildfly_wildfly-core
train
8364da061a106e25916fc0adf49f3baac10b2d73
diff --git a/pyphi/concept_caching.py b/pyphi/concept_caching.py index <HASH>..<HASH> 100644 --- a/pyphi/concept_caching.py +++ b/pyphi/concept_caching.py @@ -5,6 +5,10 @@ """ Objects and functions for managing the normalization, caching, and retrieval of concepts. + +.. warning:: + + Concept caching is disabled and likely broken. Use at your own risk! """ from collections import namedtuple
Add warning to `concept_caching`
wmayner_pyphi
train
08c17d68a02bf78af951466b6209cf149e0ec5a6
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100755 --- a/setup.py +++ b/setup.py @@ -1,11 +1,16 @@ #!/usr/bin/env python # -*- coding: utf-8 +import sys try: from setuptools import setup except ImportError: from distutils.core import setup +install_requires = [] +if sys.version_info < (2, 7): + install_requires.append('argparse') + import fragments setup( @@ -17,6 +22,7 @@ setup( author_email='matt-fragments@theory.org', url='https://github.com/glyphobet/fragments', packages= ['fragments',], + install_requires=install_requires, entry_points = { 'console_scripts': [ 'fragments = fragments.commands:_main', @@ -30,7 +36,9 @@ setup( 'Natural Language :: English', 'License :: OSI Approved :: BSD License', 'Programming Language :: Python', + 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.2', + 'Programming Language :: Python :: 3.3', ), )
support Python <I> and <I> from setup.py
glyphobet_fragments
train
6160cb57f401f9c5eabe6793a456943a2c6dc25d
diff --git a/src/utils/material.js b/src/utils/material.js
index <HASH>..<HASH> 100644
--- a/src/utils/material.js
+++ b/src/utils/material.js
@@ -1,7 +1,8 @@
 var THREE = require('../lib/three');

 /**
- * Update `material.map` given `data.src`. For standard and flat shaders.
+ * Update `material` texture property (usually but not always `map`)
+ * from `data` property (usually but not always `src`)
  *
  * @param {object} shader - A-Frame shader instance.
  * @param {object} data
@@ -10,24 +11,43 @@ module.exports.updateMapMaterialFromData = function (materialName, dataName, sha
   var el = shader.el;
   var material = shader.material;
   var src = data[dataName];
-  var shadowSrcName = '_texture_' + dataName;

-  if (src) {
-    if (src === shader[shadowSrcName]) { return; }
-    // Texture added or changed.
-    shader[shadowSrcName] = src;
-    if (src instanceof THREE.Texture) { setMap(src); return; }
-    el.sceneEl.systems.material.loadTexture(src, {src: src, repeat: data.repeat, offset: data.offset, npot: data.npot}, setMap);
+  // Because a single material / shader may have multiple textures,
+  // we need to remember the source value for this data property
+  // to avoid redundant operations which can be expensive otherwise
+  // (e.g. video texture loads).
+  if (!shader.materialSrcs) { shader.materialSrcs = {}; }
+
+  if (!src) {
+    // Forget the prior material src.
+    delete shader.materialSrcs[materialName];
+    // Remove the texture.
+    setMap(null);
     return;
   }

-  // Texture removed.
-  if (!material[materialName]) { return; }
-  shader[shadowSrcName] = null;
-  setMap(null);
+  // Don't process if material src hasn't changed.
+  if (src === shader.materialSrcs[materialName]) { return; }
+
+  // Remember the new src for this texture (there may be multiple).
+  shader.materialSrcs[materialName] = src;
+
+  // If the new material src is already a texture, just use it.
+  if (src instanceof THREE.Texture) { setMap(src); } else {
+    // Load texture for the new material src.
+    // (And check if we should still use it once available in callback.)
+    el.sceneEl.systems.material.loadTexture(src,
+      {src: src, repeat: data.repeat, offset: data.offset, npot: data.npot},
+      checkSetMap);
+  }
+
+  function checkSetMap (texture) {
+    // If the source has been changed, don't use loaded texture.
+    if (shader.materialSrcs[materialName] !== src) { return; }
+    setMap(texture);
+  }

   function setMap (texture) {
-    if (shader[shadowSrcName] !== src) { return; }
     material[materialName] = texture;
     material.needsUpdate = true;
     handleTextureEvents(el, texture);
diff --git a/tests/components/material.test.js b/tests/components/material.test.js
index <HASH>..<HASH> 100644
--- a/tests/components/material.test.js
+++ b/tests/components/material.test.js
@@ -81,6 +81,36 @@ suite('material', function () {
     });
   });

+  test('removes texture when src attribute removed', function (done) {
+    var el = this.el;
+    var imageUrl = 'base/tests/assets/test.png';
+    el.setAttribute('material', '');
+    assert.notOk(el.components.material.material.texture);
+    el.setAttribute('material', 'src: url(' + imageUrl + ')');
+    el.addEventListener('materialtextureloaded', function (evt) {
+      var loadedTexture = evt.detail.texture;
+      assert.ok(el.components.material.material.map === loadedTexture);
+      el.removeAttribute('material', 'src');
+      assert.notOk(el.components.material.material.map);
+      done();
+    });
+  });
+
+  test('removes texture when src attribute is empty string', function (done) {
+    var el = this.el;
+    var imageUrl = 'base/tests/assets/test.png';
+    el.setAttribute('material', '');
+    assert.notOk(el.components.material.material.texture);
+    el.setAttribute('material', 'src: url(' + imageUrl + ')');
+    el.addEventListener('materialtextureloaded', function (evt) {
+      var loadedTexture = evt.detail.texture;
+      assert.ok(el.components.material.material.map === loadedTexture);
+      el.setAttribute('material', 'src', '');
+      assert.notOk(el.components.material.material.map);
+      done();
+    });
+  });
+
   test('sets material to MeshShaderMaterial for custom shaders', function () {
     var el = this.el;
     delete shaders.test;
fix case where removing texture (null was not equal to empty string) (#<I>)

* fix case where removing texture (null was not equal to empty string)
* added tests for removed / empty src attribute per discussion on PR
* simplify code per discussion on #<I>
* better explanation in comments
* collect material src values in materialSrcs, per discussion on PR
aframevr_aframe
train
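The commit above keeps a per-slot record of the last requested texture source so redundant loads are skipped, and discards an async load whose source changed while it was in flight. A minimal Python sketch of that stale-result guard, with hypothetical names (`TextureSlot`, `loader`) standing in for the shader and the texture system:

```python
class TextureSlot:
    """One material slot (e.g. `map`): remembers the last requested
    source so redundant loads are skipped and late async results for
    a superseded source are discarded."""

    def __init__(self):
        self.current_src = None
        self.texture = None

    def request(self, src, loader):
        if src is None:
            # Source removed: forget it and clear the texture.
            self.current_src = None
            self.texture = None
            return
        if src == self.current_src:
            return  # already requested; avoid an expensive reload

        self.current_src = src

        def on_loaded(texture):
            # Stale-result guard: apply only if src is still current.
            if self.current_src == src:
                self.texture = texture

        loader(src, on_loaded)


slot = TextureSlot()
slot.request("a.png", lambda src, done: done(f"texture<{src}>"))
assert slot.texture == "texture<a.png>"
```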
517a158651774312afff5189dd4f7b6bffc8d543
diff --git a/source/php/BulkImport.php b/source/php/BulkImport.php index <HASH>..<HASH> 100644 --- a/source/php/BulkImport.php +++ b/source/php/BulkImport.php @@ -235,10 +235,15 @@ class BulkImport $index = $this->curl->request('POST', rtrim(AD_INTEGRATION_URL, "/") . '/user/index', $data, 'json', array('Content-Type: application/json')); //Validate json response - if ($this->response::isJsonError($index)) { + if ($this->response::isJsonError($index)||!json_decode($index)) { return false; } + //Check that no errors occured + if (json_last_error() == JSON_ERROR_NONE) { + die("Could not read JSON."); + } + //Return return array_map('strtolower', json_decode($index)); }
Check that response is valid json
helsingborg-stad_active-directory-api-wp-integration
train
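The change above refuses to proceed when the HTTP response does not decode as JSON. A small Python sketch of the same guard, with hypothetical names; the point is that a decode failure becomes an explicit failure result instead of letting a null value flow into later processing:

```python
import json

def parse_user_index(raw_body):
    """Return a lowercased list of names, or None when the body is
    not valid JSON of the expected shape."""
    try:
        data = json.loads(raw_body)
    except (TypeError, ValueError):
        return None  # unreadable JSON: signal failure to the caller
    if not isinstance(data, list):
        return None  # valid JSON but not the expected index shape
    return [str(name).lower() for name in data]

assert parse_user_index('["Alice", "BOB"]') == ["alice", "bob"]
assert parse_user_index("<html>error</html>") is None
```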
8db86aca8db5e6178c755816cc691993f5758562
diff --git a/lib/describe.js b/lib/describe.js index <HASH>..<HASH> 100644 --- a/lib/describe.js +++ b/lib/describe.js @@ -48,7 +48,10 @@ function unwrapComplex (tag, complex) { // Try to unwrap by calling `valueOf()`. `describePrimitive()` will return // `null` if the resulting value is not a primitive, in which case it's // ignored. - if (typeof complex.valueOf === 'function') return describePrimitive(complex.valueOf()) + if (typeof complex.valueOf === 'function') { + const value = complex.valueOf() + if (value !== complex) return describePrimitive(value) + } return null }
Guard against valueOf() returning the same object
concordancejs_concordance
train
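The fix above guards the `valueOf()` unwrap against an object whose hook returns itself, which would otherwise be fed straight back into description. A Python rendering of the same identity check (`value_of` is a hypothetical hook name):

```python
def unwrap(value):
    """Call the object's conversion hook, but reject a result that is
    the very same object -- unwrapping it again would loop forever."""
    hook = getattr(value, "value_of", None)
    if callable(hook):
        unwrapped = hook()
        if unwrapped is not value:
            return unwrapped
    return None


class SelfReturning:
    def value_of(self):
        return self  # pathological hook the guard defends against

assert unwrap(SelfReturning()) is None
```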
a684dbdf6f340360e493345fd013be050ea0c9f1
diff --git a/src/core/structured-log.js b/src/core/structured-log.js index <HASH>..<HASH> 100644 --- a/src/core/structured-log.js +++ b/src/core/structured-log.js @@ -403,26 +403,53 @@ if (!batchOptions.timeDuration) { batchOptions.timeDuration = 1000; } - + var batchedLogEvents = []; var lastFlushTime = (new Date()).getTime(); return self.pipe(function (evt, next) { - batchedLogEvents.push(evt); - var curTime = (new Date()).getTime(); + if (batchFlushTimeout) { + // Cancel previous pending batch flush. + clearTimeout(batchFlushTimeout); + batchFlushTimeout = null; + } - if ((batchOptions.batchSize && batchedLogEvents.length >= batchOptions.batchSize) || - (batchOptions.timeDuration && (curTime - lastFlushTime) > batchOptions.timeDuration)) { + var batchFlushTimeout = null; // Used to cancel the pending flush. + // + // Flush the batch. + // + var flushBatch = function () { // Flush the batch. batchedLogEvents.reverse(); batchedLogEvents.forEach(function (batchedEvent) { next(batchedEvent); }); - batchedLogEvents = []; + batchedLogEvents = []; lastFlushTime = curTime; + batchFlushTimeout = null; + }; + + // Queue pending batch flush. + batchFlushTimeout = setTimeout(flushBatch, batchOptions.timeDuration); + + batchedLogEvents.push(evt); + + var curTime = (new Date()).getTime(); + + + if (batchedLogEvents.length >= batchOptions.batchSize || + batchOptions.timeDuration && (curTime - lastFlushTime) > batchOptions.timeDuration) { + + if (batchFlushTimeout) { + // Cancel previous pending batch flush. + clearTimeout(batchFlushTimeout); + batchFlushTimeout = null; + } + + flushBatch(); } }); }; diff --git a/test/structured-log.tests.js b/test/structured-log.tests.js index <HASH>..<HASH> 100644 --- a/test/structured-log.tests.js +++ b/test/structured-log.tests.js @@ -235,4 +235,37 @@ describe('Logger', function(){ assert(log1, written[0].message); assert(log2, written[1].message); }); + + it('batching by time should suppress log events until the time has elapsed', function (done) { + + var written = []; + var log = serilog.configuration() + .batch({ + timeDuration: 100, + }) + .writeTo(function(evt) { written.push(evt); }) + .createLogger(); + + var log1 = '1'; + log(log1); + + assert.equal(0, written.length); + + // Wait for the time out to elapse. + setTimeout(function () { + + try { + assert.equal(1, written.length); + assert(log1, written[0].message); + } + catch (ex) + { + done(ex); + return; + } + + done(); + }, 100); + + }); }); \ No newline at end of file
Implemented the most basic version of batching possible. It is currently self-contained within the 'batch' function. This is a terrible implementation but it works, has tests, fits in with the existing pipeline code and doesn't touch the rest of the code. The implementation will have to change anyway to be able to implement the flush function.
structured-log_structured-log
train
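The batching above has two flush triggers: a size threshold, and a timer that is re-armed on every event so quiet periods still drain the buffer. A compact Python sketch of that shape using `threading.Timer`; names and defaults are illustrative, not the library's API:

```python
import threading

class Batcher:
    """Buffers events and flushes either when the batch is full or
    after `time_duration` seconds of inactivity, cancelling any
    pending timed flush first."""

    def __init__(self, sink, batch_size=10, time_duration=1.0):
        self.sink = sink
        self.batch_size = batch_size
        self.time_duration = time_duration
        self.events = []
        self.timer = None
        self.lock = threading.Lock()

    def emit(self, event):
        with self.lock:
            if self.timer is not None:
                self.timer.cancel()  # cancel the pending timed flush
            self.events.append(event)
            if len(self.events) >= self.batch_size:
                self._flush_locked()
            else:
                self.timer = threading.Timer(self.time_duration, self._flush)
                self.timer.daemon = True
                self.timer.start()

    def _flush(self):
        with self.lock:
            self._flush_locked()

    def _flush_locked(self):
        for event in self.events:
            self.sink(event)
        self.events = []
        self.timer = None


b = Batcher(print, batch_size=3, time_duration=0.05)
for i in range(3):
    b.emit(i)  # the third event triggers an immediate size flush
```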
02aec236c880e56c609e7733f34a0546be266937
diff --git a/javascript/firefox-driver/js/utils.js b/javascript/firefox-driver/js/utils.js index <HASH>..<HASH> 100644 --- a/javascript/firefox-driver/js/utils.js +++ b/javascript/firefox-driver/js/utils.js @@ -243,7 +243,7 @@ Utils.useNativeEvents = function() { prefs.prefHasUserValue("webdriver_enable_native_events") ? prefs.getBoolPref("webdriver_enable_native_events") : false; - return enableNativeEvents && Utils.getNativeEvents(); + return !!(enableNativeEvents && Utils.getNativeEvents()); }; Utils.type = function(doc, element, text, opt_useNativeEvents, jsTimer, releaseModifiers,
DanielWagnerHall: Coerce to boolean, on the off chance that's what's causing the tests to fail on CI r<I>
SeleniumHQ_selenium
train
340e972bf4f722600a391083d63ba502dcf53bad
diff --git a/src/main/java/org/jenkinsci/plugins/ghprb/GhprbRepository.java b/src/main/java/org/jenkinsci/plugins/ghprb/GhprbRepository.java index <HASH>..<HASH> 100644 --- a/src/main/java/org/jenkinsci/plugins/ghprb/GhprbRepository.java +++ b/src/main/java/org/jenkinsci/plugins/ghprb/GhprbRepository.java @@ -73,7 +73,7 @@ public class GhprbRepository { Set<Integer> closedPulls = new HashSet<Integer>(pulls.keySet()); for(GHPullRequest pr : prs){ - try { + if(pr.getHead() == null) try { pr = repo.getPullRequest(pr.getNumber()); } catch (IOException ex) { Logger.getLogger(GhprbRepository.class.getName()).log(Level.SEVERE, "Could not retrieve pr " + pr.getNumber(), ex);
check if the detailed PR information needs to be obtained
jenkinsci_ghprb-plugin
train
d0a156b665f2e5d548af24e7e6ada4012f9204ff
diff --git a/js/ClientList.js b/js/ClientList.js index <HASH>..<HASH> 100644 --- a/js/ClientList.js +++ b/js/ClientList.js @@ -234,7 +234,7 @@ selectionDiv = document.createElement('div'); this.bodyDiv.appendChild(selectionDiv); selectionDiv.appendChild(document.createTextNode('Selected IDs: ')); - this.clientsField = W.getTextInput(); + this.clientsField = W.getTextArea(); selectionDiv.appendChild(this.clientsField); recipientSelector = W.getRecipientSelector(); recipientSelector.onchange = function() { @@ -641,8 +641,14 @@ table = i < 4 ? this.msgBar.table : this.msgBar.tableAdvanced; table.add(field, i, 0); - table.add(W.getTextInput( - this.msgBar.id + '_' + field, {tabindex: i+1}), i, 1); + if (field === 'data') { + table.add(W.getTextArea( + this.msgBar.id + '_' + field, {tabindex: i+1}), i, 1); + } + else { + table.add(W.getTextInput( + this.msgBar.id + '_' + field, {tabindex: i+1}), i, 1); + } if (field === 'action') { this.msgBar.actionSel = W.getActionSelector(
Changed some TextInputs to TextAreas
nodeGame_ultimatum-game
train
7969c70888fd99bf30692e0e5563f3da059a3002
diff --git a/src/css_composer/model/CssRule.js b/src/css_composer/model/CssRule.js index <HASH>..<HASH> 100644 --- a/src/css_composer/model/CssRule.js +++ b/src/css_composer/model/CssRule.js @@ -1,5 +1,5 @@ import { map } from 'underscore'; -import Backbone from 'backbone'; +import { Model } from 'backbone'; import Styleable from 'domain_abstract/model/Styleable'; import { isEmpty, forEach, isString } from 'underscore'; import Selectors from 'selector_manager/model/Selectors'; @@ -7,40 +7,42 @@ import { isEmptyObj } from 'utils/mixins'; const { CSS } = window; -export default Backbone.Model.extend(Styleable).extend({ - defaults: { - // Css selectors - selectors: [], +export default class CssRule extends Model.extend(Styleable) { + defaults() { + return { + // Css selectors + selectors: [], - // Additional string css selectors - selectorsAdd: '', + // Additional string css selectors + selectorsAdd: '', - // Css properties style - style: {}, + // Css properties style + style: {}, - // On which device width this rule should be rendered, eg. @media (max-width: 1000px) - mediaText: '', + // On which device width this rule should be rendered, eg. @media (max-width: 1000px) + mediaText: '', - // State of the rule, eg: hover | pressed | focused - state: '', + // State of the rule, eg: hover | pressed | focused + state: '', - // Indicates if the rule is stylable - stylable: true, + // Indicates if the rule is stylable + stylable: true, - // Type of at-rule, eg. 'media', 'font-face', etc. - atRuleType: '', + // Type of at-rule, eg. 'media', 'font-face', etc. + atRuleType: '', - // This particolar property is used only on at-rules, like 'page' or - // 'font-face', where the block containes only style declarations - singleAtRule: 0, + // This particolar property is used only on at-rules, like 'page' or + // 'font-face', where the block containes only style declarations + singleAtRule: 0, - // If true, sets '!important' on all properties - // You can use an array to specify properties to set important - // Used in view - important: 0, + // If true, sets '!important' on all properties + // You can use an array to specify properties to set important + // Used in view + important: 0, - _undo: true - }, + _undo: true + }; + } initialize(c, opt = {}) { this.config = c || {}; @@ -48,20 +50,20 @@ export default Backbone.Model.extend(Styleable).extend({ this.em = opt.em; this.ensureSelectors(); this.on('change', this.__onChange); - }, + } __onChange(m, opts) { const { em } = this; const changed = this.changedAttributes(); !isEmptyObj(changed) && em && em.changesUp(opts); - }, + } clone() { const opts = { ...this.opt }; const attr = { ...this.attributes }; attr.selectors = this.get('selectors').map(s => s.clone()); return new this.constructor(attr, opts); - }, + } ensureSelectors(m, c, opts) { const { em } = this; @@ -83,7 +85,7 @@ export default Backbone.Model.extend(Styleable).extend({ this.set('selectors', sels, opts); this.listenTo(...toListen); - }, + } /** * Returns an at-rule statement if possible, eg. '@media (...)', '@keyframes' @@ -96,7 +98,7 @@ export default Backbone.Model.extend(Styleable).extend({ const typeStr = type ? `@${type}` : condition ? '@media' : ''; return typeStr + (condition && typeStr ? 
` ${condition}` : ''); - }, + } /** * Return selectors fo the rule as a string @@ -118,7 +120,7 @@ export default Backbone.Model.extend(Styleable).extend({ selectors && result.push(`${selectors}${stateStr}`); addSelector && !opts.skipAdd && result.push(addSelector); return result.join(', '); - }, + } /** * Get declaration block @@ -136,7 +138,7 @@ export default Backbone.Model.extend(Styleable).extend({ } return result; - }, + } /** * Returns CSS string of the rule @@ -154,13 +156,13 @@ export default Backbone.Model.extend(Styleable).extend({ } return result; - }, + } toJSON(...args) { - const obj = Backbone.Model.prototype.toJSON.apply(this, args); + const obj = Model.prototype.toJSON.apply(this, args); if (this.em.getConfig('avoidDefaults')) { - const defaults = this.defaults; + const defaults = this.defaults(); forEach(defaults, (value, key) => { if (obj[key] === value) { @@ -173,7 +175,7 @@ export default Backbone.Model.extend(Styleable).extend({ } return obj; - }, + } /** * Compare the actual model with parameters @@ -216,4 +218,4 @@ export default Backbone.Model.extend(Styleable).extend({ return true; } -}); +}
Transform CssRule into an ES6 class

artf_grapesjs
train
d10bd281e59b02f4c74375ee12f209bc11ad89b9
diff --git a/fermipy/merge_utils.py b/fermipy/merge_utils.py index <HASH>..<HASH> 100644 --- a/fermipy/merge_utils.py +++ b/fermipy/merge_utils.py @@ -7,7 +7,8 @@ import sys import argparse import numpy as np from astropy.io import fits - +from fermipy.hpx_utils import HPX +from fermipy.skymap import HpxMap def update_null_primary(hdu_in, hdu=None): """ 'Update' a null primary HDU @@ -140,19 +141,22 @@ def extract_gti_data(hdu_in): return (data, exposure, tstop) -def update_hpx_skymap_allsky(hdu_in, hdu): +def update_hpx_skymap_allsky(map_in, map_out): """ 'Update' a HEALPix skymap - This checks hdu exists and creates it from hdu_in if it does not. - If hdu does exist, this adds the data in hdu_in to hdu + This checks map_out exists and creates it from map_in if it does not. + If map_out does exist, this adds the data in map_in to map_out """ - if hdu is None: - hdu = fits.BinTableHDU( - data=hdu_in.data, header=hdu_in.header, name=hdu_in.name) + if map_out is None: + in_hpx = map_in.hpx + out_hpx = HPX.create_hpx(in_hpx.nside, in_hpx.nest, in_hpx.coordsys, + None, in_hpx.ebins, None, in_hpx.conv, None) + data_out = map_in.expanded_counts_map() + print(data_out.shape, data_out.sum()) + map_out = HpxMap(data_out, out_hpx) else: - for col in hdu.columns: - hdu.data[col.name] += hdu_in.data[col.name] - return hdu + map_out.data += map_in.expanded_counts_map() + return map_out def merge_wcs_counts_cubes(filelist): @@ -209,13 +213,18 @@ def merge_hpx_counts_cubes(filelist): nfiles = len(filelist) ngti = np.zeros(nfiles, int) + out_name = None + for i, filename in enumerate(filelist): fin = fits.open(filename) sys.stdout.write('.') sys.stdout.flush() if i == 0: out_prim = update_null_primary(fin[0], out_prim) - out_skymap = update_hpx_skymap_allsky(fin[1], out_skymap) + out_name = fin[1].name + + map_in = HpxMap.create_from_hdulist(fin) + out_skymap = update_hpx_skymap_allsky(map_in, out_skymap) if i == 0: try: out_ebounds = update_ebounds(fin["EBOUNDS"], out_ebounds) @@ -239,7 +248,9 @@ def merge_hpx_counts_cubes(filelist): else: fin.close() - hdulist = [out_prim, out_skymap, out_ebounds] + out_skymap_hdu = out_skymap.create_image_hdu("SKYMAP") + + hdulist = [out_prim, out_skymap_hdu, out_ebounds] if len(datalist_gti) > 0: out_gti = merge_all_gti_data(datalist_gti, ngti, first['GTI'])
adjust merge utils to deal with explicit GARDIAN maps
fermiPy_fermipy
train
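The refactor above switches the merge loop to a create-or-accumulate pattern: the first file seeds the output map, and every later file's expanded counts are summed into it. The HEALPix details aside, the pattern is just this (plain NumPy arrays stand in for the map objects):

```python
import numpy as np

def update_allsky_map(map_in, map_out):
    """Create-or-accumulate merge: the first input becomes the output
    template, later inputs are summed into it."""
    if map_out is None:
        return map_in.copy()
    map_out += map_in
    return map_out

total = None
for counts in (np.ones(12), np.full(12, 2.0)):
    total = update_allsky_map(counts, total)
# total is now the element-wise sum of all input maps
```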
4c4f2b414802acf443b067ce5ce8625730beadcf
diff --git a/language/Fallbacks.php b/language/Fallbacks.php index <HASH>..<HASH> 100644 --- a/language/Fallbacks.php +++ b/language/Fallbacks.php @@ -5,7 +5,7 @@ // Listed a few examples for now. // Perhaps this can be automated in the future via TranslateWiki based on MediaWiki's core fallbacks. -// Values can be geted by getFallbacks.php +// Values can be refreshed with getFallbacks.php $fallbacks = array( 'ab' => 'ru', 'ace' => 'id', @@ -22,6 +22,7 @@ $fallbacks = array( 'bar' => 'de', 'bat_smg' => 'sgs', 'bcc' => 'fa', + 'be_tarask' => 'be', 'be_x_old' => 'be-tarask', 'bh' => 'bho', 'bjn' => 'id',
Add 'be' as fallback to 'be-tarask'. Changed in core at <URL>
Krinkle_intuition
train
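Entries like the one added above form a fallback chain rather than a single hop ('be_x_old' → 'be-tarask' → 'be'). A sketch of resolving such a chain with a cycle guard; the key normalisation mirrors the underscore convention in the table, and the English terminal is an assumption for illustration:

```python
FALLBACKS = {"be_x_old": "be-tarask", "be_tarask": "be", "ab": "ru"}

def resolve_chain(lang, fallbacks=FALLBACKS, final="en"):
    """Follow the fallback map until it bottoms out, guarding
    against accidental cycles in the table."""
    chain, seen = [lang], {lang}
    key = lang
    while True:
        nxt = fallbacks.get(key.replace("-", "_"))
        if nxt is None or nxt in seen:
            break
        chain.append(nxt)
        seen.add(nxt)
        key = nxt
    if final not in seen:
        chain.append(final)  # everything ultimately falls back to English
    return chain

assert resolve_chain("be_x_old") == ["be_x_old", "be-tarask", "be", "en"]
```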
cb5c1faa9dd1debc5dbaf41485215c6d43e71d7a
diff --git a/src/connectors/refinement-list/connectRefinementList.js b/src/connectors/refinement-list/connectRefinementList.js index <HASH>..<HASH> 100644 --- a/src/connectors/refinement-list/connectRefinementList.js +++ b/src/connectors/refinement-list/connectRefinementList.js @@ -65,10 +65,9 @@ export default function connectRefinementList(renderFn) { }) => { checkUsage({attributeName, operator, usage}); - /* eslint-disable max-params */ - const render = (items, state, createURL, + const render = ({items, state, createURL, helperSpecializedSearchFacetValues, - refine, isFromSearch, isFirstSearch, instantSearchInstance) => { + refine, isFromSearch, isFirstSearch, instantSearchInstance}) => { // Compute a specific createURL method able to link to any facet value state change const _createURL = facetValue => createURL(state.toggleRefinement(attributeName, facetValue)); @@ -79,7 +78,9 @@ export default function connectRefinementList(renderFn) { state, createURL, helperSpecializedSearchFacetValues, - refine); + refine, + instantSearchInstance, + ); renderFn({ createURL: _createURL, @@ -97,22 +98,36 @@ export default function connectRefinementList(renderFn) { let refine; const createSearchForFacetValues = helper => - (state, createURL, helperSpecializedSearchFacetValues, toggleRefinement) => + (state, createURL, helperSpecializedSearchFacetValues, toggleRefinement, instantSearchInstance) => query => { if (query === '' && lastResultsFromMainSearch) { // render with previous data from the helper. - render( - lastResultsFromMainSearch, state, createURL, - helperSpecializedSearchFacetValues, toggleRefinement, false); + render({ + items: lastResultsFromMainSearch, + state, + createURL, + helperSpecializedSearchFacetValues, + refine: toggleRefinement, + isFromSearch: false, + isFirstSearch: false, + instantSearchInstance, + }); } else { helper.searchForFacetValues(attributeName, query).then(results => { const facetValues = results.facetHits.map(h => { h.name = h.value; return h; }); - render( - facetValues, state, createURL, - helperSpecializedSearchFacetValues, toggleRefinement, true, false); + render({ + items: facetValues, + state, + createURL, + helperSpecializedSearchFacetValues, + refine: toggleRefinement, + isFromSearch: true, + isFirstSearch: false, + instantSearchInstance, + }); }); } }; @@ -137,9 +152,18 @@ export default function connectRefinementList(renderFn) { searchForFacetValues = createSearchForFacetValues(helper); - render([], helper.state, createURL, searchForFacetValues, refine, false, true, instantSearchInstance); + render({ + items: [], + state: helper.state, + createURL, + helperSpecializedSearchFacetValues: searchForFacetValues, + refine, + isFromSearch: false, + isFirstSearch: true, + instantSearchInstance, + }); }, - render({results, state, createURL}) { + render({results, state, createURL, instantSearchInstance}) { const facetValues = results .getFacetValues(attributeName, {sortBy}) .map(h => { @@ -149,7 +173,16 @@ export default function connectRefinementList(renderFn) { lastResultsFromMainSearch = facetValues; - render(facetValues, state, createURL, searchForFacetValues, refine, false, false); + render({ + items: facetValues, + state, + createURL, + helperSpecializedSearchFacetValues: searchForFacetValues, + refine, + isFromSearch: false, + isFirstSearch: false, + instantSearchInstance, + }); }, }; };
feat(connector): small internal refactoring for SFFV
algolia_instantsearch.js
train
3f5f983f5fab911608f955cc996607e7b79a504f
diff --git a/dvc/ui/__init__.py b/dvc/ui/__init__.py index <HASH>..<HASH> 100644 --- a/dvc/ui/__init__.py +++ b/dvc/ui/__init__.py @@ -87,7 +87,7 @@ class Console: data: Any, indent: int = 2, ) -> None: - if sys.stdout.isatty(): + if self.isatty(): from rich.json import JSON j = JSON.from_data(data, indent=indent) @@ -237,6 +237,11 @@ class Console: def status(self, status: str, **kwargs: Any) -> "Status": return self.error_console.status(status, **kwargs) + def isatty(self) -> bool: + import sys + + return sys.stdout.isatty() + ui = Console()
ui: import sys on write_json
iterative_dvc
train
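The one-line fix above routes the TTY check through the console object instead of calling `sys.stdout.isatty()` inline. The underlying pattern, rich output only when stdout is an interactive terminal and plain output when piped, looks like this as a standalone sketch (no `rich` dependency here):

```python
import json
import sys

def write_json(data, indent=2):
    """Pretty output belongs on an interactive terminal; pipes and
    redirects get compact JSON so downstream tools can parse it."""
    if sys.stdout.isatty():
        text = json.dumps(data, indent=indent)  # human-readable form
    else:
        text = json.dumps(data)                 # compact, machine-friendly
    sys.stdout.write(text + "\n")

write_json({"stage": "train", "ok": True})
```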
99bf08c148f3c5a87cd4fe69a4c3620a5972536d
diff --git a/lib/evalhook.rb b/lib/evalhook.rb index <HASH>..<HASH> 100644 --- a/lib/evalhook.rb +++ b/lib/evalhook.rb @@ -226,6 +226,14 @@ module EvalHook def evalhook_i(code, b_ = nil, name = "(eval)", line = 1) EvalHook.validate_syntax code + + tree = RubyParser.new.parse code + + context = PartialRuby::PureRubyContext.new + emulationcode = context.emul tree + + eval emulationcode + end end
<I> test pass: implemented evalhook_i using RubyParser and PartialRuby (without hooking)
tario_evalhook
train
6f788b1d91cd1d9d5731527beec59c16df94862a
diff --git a/activerecord/test/schema/schema.rb b/activerecord/test/schema/schema.rb index <HASH>..<HASH> 100644 --- a/activerecord/test/schema/schema.rb +++ b/activerecord/test/schema/schema.rb @@ -334,7 +334,7 @@ ActiveRecord::Schema.define do t.integer :ideal_reference_id end - create_table :keyboards, force: true, :id => false do |t| + create_table :keyboards, force: true, id: false do |t| t.primary_key :key_number t.string :name end
Use <I> Hash syntax consistently
rails_rails
train
864037c674c3aa12b45742baeca89ee98a8001ad
diff --git a/lib/dynamodb.js b/lib/dynamodb.js index <HASH>..<HASH> 100644 --- a/lib/dynamodb.js +++ b/lib/dynamodb.js @@ -80,6 +80,9 @@ if (err) $this.events.error.apply( $this, [ method, err , params ] ) + if ((data || {}).hasOwnProperty('ConsumedCapacity') ) + $this.ConsumedCapacity = data.ConsumedCapacity + callback.apply( $this, [ err, data ] ) }) } @@ -185,8 +188,6 @@ this.routeCall('putItem', $thisQuery , function(err,data) { if (err) return typeof callback !== "function" ? null : callback.apply( this, [ err, false ] ) - - this.ConsumedCapacity = data.ConsumedCapacity typeof callback !== "function" ? null : callback.apply( this, [ err, data, data ]) }) @@ -213,8 +214,6 @@ this.routeCall('putItem', $thisQuery , function(err,data) { if (err) return typeof callback !== "function" ? null : callback.apply( this, [ err, false ] ) - - this.ConsumedCapacity = data.ConsumedCapacity typeof callback !== "function" ? null : callback.apply( this, [ err, data, data ]) }) @@ -247,8 +246,6 @@ if (err) return typeof callback !== "function" ? null : callback.apply( this, [ err, false ] ) - this.ConsumedCapacity = data.ConsumedCapacity - typeof callback !== "function" ? null : callback.apply( this, [ err, data, data ]) }) }) @@ -294,8 +291,6 @@ if (err) return typeof callback !== "function" ? null : callback.apply( this, [ err, false ] ) - this.ConsumedCapacity = data.ConsumedCapacity - typeof callback !== "function" ? null : callback.apply( this, [ err, data, data ]) }) }) @@ -344,8 +339,6 @@ if (err) return typeof callback !== "function" ? null : callback.apply( this, [ err, false ] ) - this.ConsumedCapacity = data.ConsumedCapacity - typeof callback !== "function" ? null : callback.apply( this, [ err, data, data ]) }) }) @@ -362,8 +355,6 @@ if (err) return typeof callback !== "function" ? null : callback.apply( this, [ err, false ] ) - this.ConsumedCapacity = data.ConsumedCapacity - typeof callback !== "function" ? null : callback.apply( this, [ err, data, data ]) }) } @@ -382,8 +373,6 @@ if (err) return $attrz.apply( this, [ err, false ] ) - this.ConsumedCapacity = data.ConsumedCapacity - $attrz.apply( this, [ err, data, data ]) }) } else { @@ -404,8 +393,6 @@ if (err) return typeof callback !== "function" ? null : callback.apply( this, [ err, false ] ) - this.ConsumedCapacity = data.ConsumedCapacity - typeof callback !== "function" ? null : callback.apply( this, [ err, data, data ]) }) } @@ -427,8 +414,6 @@ if (err) return typeof callback !== "function" ? null : callback.apply( this, [ err, false ] ) - this.ConsumedCapacity = data.ConsumedCapacity - typeof callback !== "function" ? null : callback.apply( this, [ err, util.normalizeItem(data.Item), data ]) }) } @@ -467,8 +452,6 @@ this.LastEvaluatedKey = data.LastEvaluatedKey === undefined ? null : data.LastEvaluatedKey - this.ConsumedCapacity = data.ConsumedCapacity - typeof callback !== "function" ? null : callback.apply( this, [ err, util.normalizeList(data.Items), data ]) }) @@ -502,8 +485,6 @@ return typeof callback !== "function" ? null : callback.apply( this, [ err, false ] ) this.LastEvaluatedKey = data.LastEvaluatedKey === undefined ? null : data.LastEvaluatedKey - - this.ConsumedCapacity = data.ConsumedCapacity typeof callback !== "function" ? null : callback.apply( this, [ err, util.normalizeList(data.Items), data ])
global handling of ConsumedCapacity
awspilot_dynamodb-oop
train
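The diff above deletes a per-callback assignment repeated in a dozen places and captures `ConsumedCapacity` once in the shared request router that every call goes through. A Python sketch of that centralisation, with hypothetical names:

```python
class Client:
    """Captures a response-level field in the single routing method,
    instead of repeating the assignment in each per-operation
    callback."""

    def __init__(self, transport):
        self.transport = transport
        self.consumed_capacity = None

    def route_call(self, method, params):
        data = self.transport(method, params)
        # Centralised capture: one place instead of one per operation.
        if isinstance(data, dict) and "ConsumedCapacity" in data:
            self.consumed_capacity = data["ConsumedCapacity"]
        return data


client = Client(lambda m, p: {"Item": {}, "ConsumedCapacity": 1.5})
client.route_call("getItem", {})
assert client.consumed_capacity == 1.5
```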
4210253d3c1ac207b3038df766cea5b288c8eeb6
diff --git a/tacl/data_store.py b/tacl/data_store.py index <HASH>..<HASH> 100644 --- a/tacl/data_store.py +++ b/tacl/data_store.py @@ -792,7 +792,7 @@ class DataStore: '{} has changed since its n-grams were ' 'added to the database'.format(filename)) if count == 0: - self._logger.error('Catalogue references work {} that does ' + self._logger.error('Catalogue references work "{}" that does ' 'not exist in the corpus'.format(name)) raise FileNotFoundError return is_valid
Quoted work filename in error message.
ajenhl_tacl
train
72e345aa4e6c0c8b92013437c9ed16d2b32e5ea8
diff --git a/pkg/apiserver/handlers.go b/pkg/apiserver/handlers.go index <HASH>..<HASH> 100644 --- a/pkg/apiserver/handlers.go +++ b/pkg/apiserver/handlers.go @@ -502,7 +502,7 @@ func (r *RequestInfoResolver) GetRequestInfo(req *http.Request) (RequestInfo, er switch req.Method { case "POST": requestInfo.Verb = "create" - case "GET": + case "GET", "HEAD": requestInfo.Verb = "get" case "PUT": requestInfo.Verb = "update" diff --git a/pkg/apiserver/handlers_test.go b/pkg/apiserver/handlers_test.go index <HASH>..<HASH> 100644 --- a/pkg/apiserver/handlers_test.go +++ b/pkg/apiserver/handlers_test.go @@ -222,7 +222,9 @@ func TestGetAPIRequestInfo(t *testing.T) { {"GET", "/api/v1/namespaces/other/pods", "list", "api", "", "v1", "other", "pods", "", "", []string{"pods"}}, {"GET", "/api/v1/namespaces/other/pods/foo", "get", "api", "", "v1", "other", "pods", "", "foo", []string{"pods", "foo"}}, + {"HEAD", "/api/v1/namespaces/other/pods/foo", "get", "api", "", "v1", "other", "pods", "", "foo", []string{"pods", "foo"}}, {"GET", "/api/v1/pods", "list", "api", "", "v1", api.NamespaceAll, "pods", "", "", []string{"pods"}}, + {"HEAD", "/api/v1/pods", "list", "api", "", "v1", api.NamespaceAll, "pods", "", "", []string{"pods"}}, {"GET", "/api/v1/namespaces/other/pods/foo", "get", "api", "", "v1", "other", "pods", "", "foo", []string{"pods", "foo"}}, {"GET", "/api/v1/namespaces/other/pods", "list", "api", "", "v1", "other", "pods", "", "", []string{"pods"}},
handle the HEAD verb correctly for authorization
kubernetes_kubernetes
train
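The fix above makes HEAD share GET's verb mapping, since a HEAD request is a read with the response body stripped. Reduced to its essence (the real resolver also inspects the request path to distinguish `get` from `list`; the `is_list` flag approximates that here):

```python
def request_verb(method, is_list=False):
    """Map an HTTP method to an authorization verb; HEAD is a read,
    so it takes the same branch as GET."""
    if method in ("GET", "HEAD"):
        return "list" if is_list else "get"
    return {"POST": "create", "PUT": "update",
            "DELETE": "delete"}.get(method, "unknown")

assert request_verb("HEAD") == "get"
assert request_verb("HEAD", is_list=True) == "list"
```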
5d05b032dc75242aab86a699791b95a95d1f0cad
diff --git a/drools-compiler/src/main/java/org/drools/rule/builder/PatternBuilder.java b/drools-compiler/src/main/java/org/drools/rule/builder/PatternBuilder.java index <HASH>..<HASH> 100755 --- a/drools-compiler/src/main/java/org/drools/rule/builder/PatternBuilder.java +++ b/drools-compiler/src/main/java/org/drools/rule/builder/PatternBuilder.java @@ -772,6 +772,8 @@ public class PatternBuilder fieldBindingDescr.getExpression(), declr, true ); + declr.setReadAccessor( extractor ); + } @SuppressWarnings("unchecked") diff --git a/drools-core/src/main/java/org/drools/rule/Pattern.java b/drools-core/src/main/java/org/drools/rule/Pattern.java index <HASH>..<HASH> 100644 --- a/drools-core/src/main/java/org/drools/rule/Pattern.java +++ b/drools-core/src/main/java/org/drools/rule/Pattern.java @@ -163,30 +163,31 @@ public class Pattern clone.setSource( (PatternSource) this.getSource().clone() ); } + if( this.declarations != null ) { + for( Declaration decl : (Iterable<Declaration>) this.declarations.values() ) { + clone.addDeclaration( decl.getIdentifier() ).setReadAccessor( decl.getExtractor() ); + } + } + for ( final Iterator it = this.constraints.iterator(); it.hasNext(); ) { final Object constr = it.next(); - if ( constr instanceof Declaration ) { - final Declaration decl = (Declaration) constr; - clone.addDeclaration( decl.getIdentifier() ).setReadAccessor( decl.getExtractor() ); - } else { - Constraint constraint = (Constraint) ((Constraint) constr).clone(); - - // we must update pattern references in cloned declarations - Declaration[] oldDecl = ((Constraint) constr).getRequiredDeclarations(); - Declaration[] newDecl = constraint.getRequiredDeclarations(); - for ( int i = 0; i < newDecl.length; i++ ) { - if ( newDecl[i].getPattern() == this ) { - newDecl[i].setPattern( clone ); - // we still need to call replace because there might be nested declarations to replace - constraint.replaceDeclaration( oldDecl[i], - newDecl[i] ); - } + Constraint constraint = (Constraint) ((Constraint) constr).clone(); + + // we must update pattern references in cloned declarations + Declaration[] oldDecl = ((Constraint) constr).getRequiredDeclarations(); + Declaration[] newDecl = constraint.getRequiredDeclarations(); + for ( int i = 0; i < newDecl.length; i++ ) { + if ( newDecl[i].getPattern() == this ) { + newDecl[i].setPattern( clone ); + // we still need to call replace because there might be nested declarations to replace + constraint.replaceDeclaration( oldDecl[i], + newDecl[i] ); } - - clone.addConstraint( constraint ); } - } + clone.addConstraint( constraint ); + } + if ( behaviors != null ) { for ( Behavior behavior : this.behaviors ) { clone.addBehavior( behavior );
JBRULES-<I>: fixing pattern clone method
kiegroup_drools
train
f6ac80c5071f37f12677b5f79e3394724c82a369
diff --git a/cmd/devp2p/discv4cmd.go b/cmd/devp2p/discv4cmd.go index <HASH>..<HASH> 100644 --- a/cmd/devp2p/discv4cmd.go +++ b/cmd/devp2p/discv4cmd.go @@ -25,6 +25,7 @@ import ( "github.com/ethereum/go-ethereum/cmd/devp2p/internal/v4test" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/crypto" + "github.com/ethereum/go-ethereum/internal/flags" "github.com/ethereum/go-ethereum/p2p/discover" "github.com/ethereum/go-ethereum/p2p/enode" "github.com/ethereum/go-ethereum/params" @@ -49,32 +50,34 @@ var ( Usage: "Sends ping to a node", Action: discv4Ping, ArgsUsage: "<node>", + Flags: v4NodeFlags, } discv4RequestRecordCommand = &cli.Command{ Name: "requestenr", Usage: "Requests a node record using EIP-868 enrRequest", Action: discv4RequestRecord, ArgsUsage: "<node>", + Flags: v4NodeFlags, } discv4ResolveCommand = &cli.Command{ Name: "resolve", Usage: "Finds a node in the DHT", Action: discv4Resolve, ArgsUsage: "<node>", - Flags: []cli.Flag{bootnodesFlag}, + Flags: v4NodeFlags, } discv4ResolveJSONCommand = &cli.Command{ Name: "resolve-json", Usage: "Re-resolves nodes in a nodes.json file", Action: discv4ResolveJSON, - Flags: []cli.Flag{bootnodesFlag}, + Flags: v4NodeFlags, ArgsUsage: "<nodes.json file>", } discv4CrawlCommand = &cli.Command{ Name: "crawl", Usage: "Updates a nodes.json file with random nodes found in the DHT", Action: discv4Crawl, - Flags: []cli.Flag{bootnodesFlag, crawlTimeoutFlag}, + Flags: flags.Merge(v4NodeFlags, []cli.Flag{crawlTimeoutFlag}), } discv4TestCommand = &cli.Command{ Name: "test", @@ -119,6 +122,13 @@ var ( } ) +var v4NodeFlags = []cli.Flag{ + bootnodesFlag, + nodekeyFlag, + nodedbFlag, + listenAddrFlag, +} + func discv4Ping(ctx *cli.Context) error { n := getNodeArg(ctx) disc := startV4(ctx) diff --git a/cmd/geth/chaincmd.go b/cmd/geth/chaincmd.go index <HASH>..<HASH> 100644 --- a/cmd/geth/chaincmd.go +++ b/cmd/geth/chaincmd.go @@ -166,10 +166,12 @@ This command dumps out the state for a given block (or latest, if none provided) // initGenesis will initialise the given JSON format genesis file and writes it as // the zero'd block (i.e. genesis) or will fail hard if it can't succeed. func initGenesis(ctx *cli.Context) error { - // Make sure we have a valid genesis JSON + if ctx.Args().Len() != 1 { + utils.Fatalf("need genesis.json file as the only argument") + } genesisPath := ctx.Args().First() if len(genesisPath) == 0 { - utils.Fatalf("Must supply path to genesis JSON file") + utils.Fatalf("invalid path to genesis file") } file, err := os.Open(genesisPath) if err != nil { diff --git a/cmd/geth/consolecmd.go b/cmd/geth/consolecmd.go index <HASH>..<HASH> 100644 --- a/cmd/geth/consolecmd.go +++ b/cmd/geth/consolecmd.go @@ -114,6 +114,10 @@ func localConsole(ctx *cli.Context) error { // remoteConsole will connect to a remote geth instance, attaching a JavaScript // console to it. func remoteConsole(ctx *cli.Context) error { + if ctx.Args().Len() > 1 { + utils.Fatalf("invalid command-line: too many arguments") + } + endpoint := ctx.Args().First() if endpoint == "" { cfg := defaultNodeConfig() diff --git a/internal/flags/helpers.go b/internal/flags/helpers.go index <HASH>..<HASH> 100644 --- a/internal/flags/helpers.go +++ b/internal/flags/helpers.go @@ -38,6 +38,15 @@ func NewApp(gitCommit, gitDate, usage string) *cli.App { return app } +// Merge merges the given flag slices. +func Merge(groups ...[]cli.Flag) []cli.Flag { + var ret []cli.Flag + for _, group := range groups { + ret = append(ret, group...) 
+ } + return ret +} + var migrationApplied = map[*cli.Command]struct{}{} // MigrateGlobalFlags makes all global flag values available in the @@ -70,6 +79,10 @@ func MigrateGlobalFlags(ctx *cli.Context) { // This iterates over all commands and wraps their action function. iterate(ctx.App.Commands, func(cmd *cli.Command) { + if cmd.Action == nil { + return + } + action := cmd.Action cmd.Action = func(ctx *cli.Context) error { doMigrateFlags(ctx)
cmd/geth, cmd/devp2p: fix some cli parsing issues (#<I>) * cmd/geth: add some missing argument count checks * internal/flags: skip cmds with no action func in MigrateGlobalFlags * internal/flags: add Merge * cmd/devp2p: re-add listener config flags in discv4 commands
ethereum_go-ethereum
train
8fb392849d8a9e924f37e4bd1736c6efc0246f14
diff --git a/pointer/smudge.go b/pointer/smudge.go index <HASH>..<HASH> 100644 --- a/pointer/smudge.go +++ b/pointer/smudge.go @@ -7,8 +7,8 @@ import ( "os" ) -func Smudge(writer io.Writer, sha string) error { - mediafile, err := gitmedia.LocalMediaPath(sha) +func Smudge(writer io.Writer, oid string) error { + mediafile, err := gitmedia.LocalMediaPath(oid) if err != nil { return err } @@ -20,7 +20,7 @@ func Smudge(writer io.Writer, sha string) error { } if err != nil { - return &SmudgeError{sha, mediafile, err.Error()} + return &SmudgeError{oid, mediafile, err.Error()} } else { return nil } @@ -60,7 +60,7 @@ func copyFile(reader io.ReadCloser, writers ...io.Writer) error { } type SmudgeError struct { - Sha string + Oid string Filename string ErrorMessage string }
oid, not sha
git-lfs_git-lfs
train
f8cc5c8f8e9eaabaf9572436b1b6640233b6cb40
diff --git a/mod/quiz/lib.php b/mod/quiz/lib.php index <HASH>..<HASH> 100644 --- a/mod/quiz/lib.php +++ b/mod/quiz/lib.php @@ -1732,7 +1732,7 @@ function quiz_extend_settings_navigation($settings, $quiznode) { } if (has_any_capability(['mod/quiz:manageoverrides', 'mod/quiz:viewoverrides'], $PAGE->cm->context)) { - $url = new moodle_url('/mod/quiz/overrides.php', array('cmid' => $PAGE->cm->id)); + $url = new moodle_url('/mod/quiz/overrides.php', array('cmid' => $PAGE->cm->id, 'mode' => 'user')); $node = navigation_node::create(get_string('overrides', 'quiz'), $url, navigation_node::TYPE_SETTING, null, 'mod_quiz_useroverrides'); $quiznode->add_node($node, $beforekey);
MDL-<I> quiz: Set 'Overrides' to link to user overrides by default
moodle_moodle
train
b78670af23bee6a1631bd65b627f74748c9fee26
diff --git a/src/Auth0.php b/src/Auth0.php index <HASH>..<HASH> 100644 --- a/src/Auth0.php +++ b/src/Auth0.php @@ -618,6 +618,28 @@ class Auth0 */ public function setIdToken($idToken) { + $this->idTokenDecoded = $this->decodeIdToken($idToken); + + if (in_array('id_token', $this->persistantMap)) { + $this->store->set('id_token', $idToken); + } + + $this->idToken = $idToken; + return $this; + } + + /** + * Verifies and decodes an ID token using the properties in this class. + * + * @param string $idToken ID token to verify and decode. + * @param array $verifierOptions Options passed to verifier. + * + * @return array + * + * @throws InvalidTokenException + */ + public function decodeIdToken(string $idToken, array $verifierOptions = []) : array + { $idTokenIss = 'https://'.$this->domain.'/'; $sigVerifier = null; if ('RS256' === $this->idTokenAlg) { @@ -628,7 +650,7 @@ class Auth0 $sigVerifier = new SymmetricVerifier($this->clientSecret); } - $verifierOptions = [ + $verifierOptions = $verifierOptions + [ // Set a custom leeway if one was passed to the constructor. 'leeway' => $this->idTokenLeeway, 'max_age' => $this->transientHandler->getOnce('max_age') ?? $this->maxAge, @@ -639,15 +661,8 @@ class Auth0 throw new InvalidTokenException('Nonce value not found in application store'); } - $idTokenVerifier = new IdTokenVerifier($idTokenIss, $this->clientId, $sigVerifier); - $this->idTokenDecoded = $idTokenVerifier->verify($idToken, $verifierOptions); - - if (in_array('id_token', $this->persistantMap)) { - $this->store->set('id_token', $idToken); - } - - $this->idToken = $idToken; - return $this; + $idTokenVerifier = new IdTokenVerifier($idTokenIss, $this->clientId, $sigVerifier); + return $idTokenVerifier->verify($idToken, $verifierOptions); } /** diff --git a/tests/Auth0Test.php b/tests/Auth0Test.php index <HASH>..<HASH> 100644 --- a/tests/Auth0Test.php +++ b/tests/Auth0Test.php @@ -617,6 +617,23 @@ class Auth0Test extends TestCase ); } + public function testThatDecodeIdTokenOptionsAreUsed() + { + $auth0 = new Auth0( self::$baseConfig + ['id_token_alg' => 'HS256'] ); + $_SESSION['auth0__nonce'] = '__test_nonce__'; + $e_message = 'No exception caught'; + try { + $auth0->decodeIdToken( self::getIdToken(), ['max_age' => 10 ] ); + } catch (InvalidTokenException $e) { + $e_message = $e->getMessage(); + } + + $this->assertStringStartsWith( + 'Authentication Time (auth_time) claim in the ID token indicates that too much time has passed', + $e_message + ); + } + /** * @throws ApiException * @throws CoreException
Add Auth0->decodeIdToken() method for ID token decoding by deps
auth0_auth0-PHP
train
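In PHP, `$verifierOptions + [...]` keeps the left-hand keys on conflict, so the caller's options override the built-in defaults. The equivalent precedence in Python is a dict merge with the caller's dict spread last; `verify` below is a stub for illustration, not Auth0's API:

```python
def verify(token, options):
    """Stub verifier: real code would check signature, nonce, max_age."""
    return {"token": token, "options_used": options}

def decode_id_token(token, verifier_options=None):
    # Caller keys win, mirroring PHP's `$verifierOptions + $defaults`.
    defaults = {"leeway": 60, "max_age": None}
    options = {**defaults, **(verifier_options or {})}
    return verify(token, options)

result = decode_id_token("a.b.c", {"max_age": 10})
assert result["options_used"] == {"leeway": 60, "max_age": 10}
```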
0087a4856d0d7b2cda5ed9073fc47ddf327e354b
diff --git a/jerminal-core/src/main/java/com/github/ykrasik/jerminal/internal/annotation/ReflectionCommandExecutor.java b/jerminal-core/src/main/java/com/github/ykrasik/jerminal/internal/annotation/ReflectionCommandExecutor.java index <HASH>..<HASH> 100644 --- a/jerminal-core/src/main/java/com/github/ykrasik/jerminal/internal/annotation/ReflectionCommandExecutor.java +++ b/jerminal-core/src/main/java/com/github/ykrasik/jerminal/internal/annotation/ReflectionCommandExecutor.java @@ -20,7 +20,9 @@ import com.github.ykrasik.jerminal.api.command.CommandArgs; import com.github.ykrasik.jerminal.api.command.CommandExecutor; import com.github.ykrasik.jerminal.api.command.OutputPrinter; import com.github.ykrasik.jerminal.internal.command.PrivilegedCommandArgs; +import com.github.ykrasik.jerminal.internal.exception.ShellException; +import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.List; import java.util.Objects; @@ -48,6 +50,10 @@ public class ReflectionCommandExecutor implements CommandExecutor { reflectionArgs.add(0, outputPrinter); // Invoke method. - method.invoke(instance, reflectionArgs.toArray()); + try { + method.invoke(instance, reflectionArgs.toArray()); + } catch (InvocationTargetException e) { + throw new ShellException(e.getCause()); + } } }
The cause exception is now propagated upwards when an exception happens during a reflection invocation.
ykrasik_jaci
train
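Java's reflective `invoke` wraps any failure inside the target method in `InvocationTargetException`; the fix rethrows the underlying cause as the shell's own error type so callers see the real problem, not the dispatch machinery. Python has no such wrapper, but the unwrap-and-rethrow shape translates directly:

```python
class ShellException(Exception):
    """Domain error the shell raises when a command body fails."""

def invoke_command(command, *args):
    try:
        return command(*args)
    except Exception as cause:
        # Surface the real failure, chained so the traceback keeps it.
        raise ShellException(str(cause)) from cause


try:
    invoke_command(lambda: 1 / 0)
except ShellException as e:
    assert isinstance(e.__cause__, ZeroDivisionError)
```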
84ecd0bd00ad4211c9effbdbba6c7839592f06fd
diff --git a/lib/trouble/middleware.rb b/lib/trouble/middleware.rb index <HASH>..<HASH> 100644 --- a/lib/trouble/middleware.rb +++ b/lib/trouble/middleware.rb @@ -17,6 +17,9 @@ module Trouble raise unless exception.message.include?('invalid %-encoding') return [400, {}, ['']] + rescue ActionController::BadRequest + raise + rescue => exception logger.fatal [$!.class, $!.message, $!.backtrace[2..5]].join("\n") ::Trouble.notify exception
Don't notify Trouble on a bad URI request
bukowskis_trouble
train
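Handler order is the whole fix above: the expected client error is re-raised before the catch-all that notifies the error tracker, so bad requests never trigger a report. The same ordering in Python, where `BadRequest` stands in for `ActionController::BadRequest`:

```python
class BadRequest(Exception):
    """Expected client error -- not worth an error-tracker report."""

def call(app, env, notify):
    try:
        return app(env)
    except BadRequest:
        raise                 # expected client error: no notification
    except Exception as exc:
        notify(exc)           # everything else is worth reporting
        raise
```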
6c118cbd7c270de58a47bcd5cbbfb4f14f1036cc
diff --git a/go/client/chat_svc_handler.go b/go/client/chat_svc_handler.go index <HASH>..<HASH> 100644 --- a/go/client/chat_svc_handler.go +++ b/go/client/chat_svc_handler.go @@ -9,6 +9,7 @@ import ( "github.com/keybase/client/go/chat/utils" "github.com/keybase/client/go/libkb" "github.com/keybase/client/go/protocol/chat1" + "github.com/keybase/client/go/protocol/gregor1" "github.com/keybase/client/go/protocol/keybase1" "github.com/keybase/go-framed-msgpack-rpc/rpc" "golang.org/x/net/context" @@ -64,6 +65,14 @@ func (c *chatServiceHandler) ListV1(ctx context.Context, opts listOptionsV1) Rep var cl ChatList cl.Conversations = make([]ConvSummary, len(inbox.ConversationsUnverified)) for i, conv := range inbox.ConversationsUnverified { + readerInfo := conv.ReaderInfo + convUnread := false + var convMtime gregor1.Time + if readerInfo != nil { + convUnread = readerInfo.ReadMsgid < readerInfo.MaxMsgid + convMtime = readerInfo.Mtime + } + maxID := chat1.MessageID(0) for _, msg := range conv.MaxMsgs { if msg.ServerHeader.MessageID > maxID { @@ -76,6 +85,9 @@ func (c *chatServiceHandler) ListV1(ctx context.Context, opts listOptionsV1) Rep Public: pub, TopicType: strings.ToLower(conv.Metadata.IdTriple.TopicType.String()), }, + Unread: convUnread, + ActiveAt: convMtime.UnixSeconds(), + ActiveAtMs: convMtime.UnixMilliseconds(), } maxID = msg.ServerHeader.MessageID } @@ -884,8 +896,11 @@ type Thread struct { // ConvSummary is used for JSON output of a conversation in the inbox. type ConvSummary struct { - ID string `json:"id"` - Channel ChatChannel `json:"channel"` + ID string `json:"id"` + Channel ChatChannel `json:"channel"` + Unread bool `json:"unread"` + ActiveAt int64 `json:"active_at"` + ActiveAtMs int64 `json:"active_at_ms"` } // ChatList is a list of conversations in the inbox.
Include unread, active_at in ConvSummary displayed by list
keybase_client
train
98d2ac169fc27aa2c80c583cdc3bde83d4caa082
diff --git a/lib/chatterbot/search.rb b/lib/chatterbot/search.rb index <HASH>..<HASH> 100644 --- a/lib/chatterbot/search.rb +++ b/lib/chatterbot/search.rb @@ -4,6 +4,8 @@ module Chatterbot # handle Twitter searches module Search + MAX_SEARCH_TWEETS = 1000 + @skip_retweets = true # @@ -29,14 +31,13 @@ module Chatterbot queries = [queries] end - # # search twitter # queries.each { |query| debug "search: #{query} #{default_opts.merge(opts)}" @current_tweet = nil - client.search( query, default_opts.merge(opts) ).each { |s| + client.search( query, default_opts.merge(opts) ).take(MAX_SEARCH_TWEETS).each { |s| update_since_id(s) debug s.text if has_whitelist? && !on_whitelist?(s)
set a limit to the number of tweets we will retrieve in a search
muffinista_chatterbot
train
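`client.search(...)` returns an auto-paginating enumerable, so iterating it naively can keep fetching pages indefinitely; `.take(n)` caps the pull. `itertools.islice` is the Python equivalent of that cap:

```python
from itertools import islice

MAX_SEARCH_TWEETS = 1000

def search(results, handle):
    """`results` may auto-paginate without bound; islice caps how
    many items are pulled from it."""
    for status in islice(results, MAX_SEARCH_TWEETS):
        handle(status)

search(iter(range(10_000_000)), lambda s: None)  # stops after 1000 pulls
```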
bd4204dbc5fd9fef82e67ce4dd236209e1c69e26
diff --git a/ghost/limit-service/lib/limit-service.js b/ghost/limit-service/lib/limit-service.js index <HASH>..<HASH> 100644 --- a/ghost/limit-service/lib/limit-service.js +++ b/ghost/limit-service/lib/limit-service.js @@ -118,9 +118,9 @@ class LimitService { } /** - * Checks if any of the configured limits acced + * Checks if any of the configured limits acceded * - * @returns {boolean} + * @returns {Promise<boolean>} */ async checkIfAnyOverLimit() { for (const limit in this.limits) {
Fixed returned value type no issue - The return type was incorrectly declared, throwing an error during type checking
TryGhost_Ghost
train
2fc9cd97b13c01bc726da02e9f653fd86777cdb9
diff --git a/endpoints-scc-aggregator/src/main/java/com/google/api/scc/aggregator/ReportRequestAggregator.java b/endpoints-scc-aggregator/src/main/java/com/google/api/scc/aggregator/ReportRequestAggregator.java index <HASH>..<HASH> 100644 --- a/endpoints-scc-aggregator/src/main/java/com/google/api/scc/aggregator/ReportRequestAggregator.java +++ b/endpoints-scc-aggregator/src/main/java/com/google/api/scc/aggregator/ReportRequestAggregator.java @@ -176,7 +176,7 @@ public class ReportRequestAggregator { if (cache == null) { return false; } - Preconditions.checkArgument(req.getServiceName() == serviceName, "service name mismatch"); + Preconditions.checkArgument(req.getServiceName().equals(serviceName), "service name mismatch"); if (hasHighImportanceOperation(req)) { return false; } diff --git a/endpoints-scc-aggregator/src/main/java/com/google/api/scc/model/ReportRequestInfo.java b/endpoints-scc-aggregator/src/main/java/com/google/api/scc/model/ReportRequestInfo.java index <HASH>..<HASH> 100644 --- a/endpoints-scc-aggregator/src/main/java/com/google/api/scc/model/ReportRequestInfo.java +++ b/endpoints-scc-aggregator/src/main/java/com/google/api/scc/model/ReportRequestInfo.java @@ -104,7 +104,7 @@ public class ReportRequestInfo extends OperationInfo { * @param ticker Ticker */ public ReportRequest asReportRequest(ReportingRule rules, Ticker ticker) { - Preconditions.checkState(Strings.isNullOrEmpty(getServiceName())); + Preconditions.checkState(!Strings.isNullOrEmpty(getServiceName())); // Populate metrics and labels if they can be associated with a method/operation Operation.Builder o = asOperation(ticker).toBuilder();
fix two precondition checks in reporting - One precondition was flipped; service name should not be null, rather than should always be null - One precondition used == instead of .equals Change-Id: Id8eefe<I>cf<I>ccdedf<I>f<I>aad1bc<I>b<I>c9
cloudendpoints_endpoints-management-java
train
2e0c886eb2d7fbb94bd677aa6ccf6490f91dd234
diff --git a/tests/parser.py b/tests/parser.py index <HASH>..<HASH> 100644 --- a/tests/parser.py +++ b/tests/parser.py @@ -149,3 +149,15 @@ class GcvsParserTestCase(unittest.TestCase): parser = GcvsParser(fp) rows = [row for row in parser] self.assertEqual(len(rows), 0) + + def test_iter_row_exception(self): + """ + Exception raised when parsing a row excludes that row from iteration. + + (XXX and ZZZZ are not valid floating point values for magnitude.) + """ + file_contents = "\n\n010001 |R And *|002402.0+383437 |M | XXX | ZZZZ | |V |53820. | | 409.2 |38 |S3,5e-S8,8e(M7e) |HIP 00002|" + fp = StringIO(file_contents) + parser = GcvsParser(fp) + rows = [row for row in parser] + self.assertEqual(len(rows), 0)
Test for exception when parsing a row.
zsiciarz_pygcvs
train
8d12aad4dccbae639022e8f81884d698f26e2019
diff --git a/lib/git_hook.rb b/lib/git_hook.rb index <HASH>..<HASH> 100644 --- a/lib/git_hook.rb +++ b/lib/git_hook.rb @@ -64,11 +64,11 @@ module Causes next if check_class.filetype && check.staged.empty? title = " Checking #{check.name}..." - print title + print title unless check.stealth? status, output = check.run_check - print_incremental_result(title, status, output) + print_incremental_result(title, status, output, check.stealth?) [status, output] end.compact print_result results @@ -81,7 +81,12 @@ module Causes false end - def print_incremental_result(title, status, output) + def print_incremental_result(title, status, output, stealth = false) + if stealth + return if status == :good + print title + end + print '.' * (@width - title.length) case status when :good diff --git a/lib/hook_specific_check.rb b/lib/hook_specific_check.rb index <HASH>..<HASH> 100644 --- a/lib/hook_specific_check.rb +++ b/lib/hook_specific_check.rb @@ -14,6 +14,11 @@ module Causes include FileMethods class << self attr_accessor :filetype + attr_accessor :stealth + + def stealth! + self.stealth = true + end end def initialize(*args) @@ -28,6 +33,10 @@ module Causes false end + def stealth? + self.class.stealth + end + def staged @staged ||= staged_files(self.class.filetype) end diff --git a/lib/plugins/commit_msg/russian_novel.rb b/lib/plugins/commit_msg/russian_novel.rb index <HASH>..<HASH> 100644 --- a/lib/plugins/commit_msg/russian_novel.rb +++ b/lib/plugins/commit_msg/russian_novel.rb @@ -2,6 +2,8 @@ module Causes::GitHook class RussianNovel < HookSpecificCheck include HookRegistry + stealth! + RUSSIAN_NOVEL_LENGTH = 30 def run_check if user_commit_message.length > RUSSIAN_NOVEL_LENGTH
Add 'stealth' checks Perfect for easter eggs. Only outputs something if the check exits non-'good'. Change-Id: Ieff<I>a6a<I>c<I>ed<I>f<I>f1f4cf<I>a8ba Reviewed-on: <URL>
sds_overcommit
train
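The `stealth!` macro above sets a class-level flag that the runner consults: stealth checks print nothing unless they fail. A Python sketch of the same idea using a plain class attribute; the check protocol here is invented for illustration:

```python
class Check:
    """Base check: subclasses flip `stealth` to stay quiet unless
    the check actually fails."""
    stealth = False
    name = "check"

    def run(self):
        return "good", ""

def run_checks(checks, out):
    for check in checks:
        status, output = check.run()
        if check.stealth and status == "good":
            continue  # easter-egg checks: silent on success
        out(f"{check.name}: {status} {output}".rstrip())

class RussianNovel(Check):
    stealth = True
    name = "russian-novel"

run_checks([Check(), RussianNovel()], print)  # only the loud check prints
```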
78d66626dee5450f2fe1f4a4d444205056005627
diff --git a/pkg/api/validation/validation.go b/pkg/api/validation/validation.go index <HASH>..<HASH> 100644 --- a/pkg/api/validation/validation.go +++ b/pkg/api/validation/validation.go @@ -2011,6 +2011,19 @@ func validateOnlyAddedTolerations(newTolerations []api.Toleration, oldToleration return allErrs } +func ValidateHostAliases(hostAliases []api.HostAlias, fldPath *field.Path) field.ErrorList { + allErrs := field.ErrorList{} + for _, hostAlias := range hostAliases { + if ip := net.ParseIP(hostAlias.IP); ip == nil { + allErrs = append(allErrs, field.Invalid(fldPath.Child("ip"), hostAlias.IP, "must be valid IP address")) + } + for _, hostname := range hostAlias.Hostnames { + allErrs = append(allErrs, ValidateDNS1123Label(hostname, fldPath.Child("hostnames"))...) + } + } + return allErrs +} + // ValidateTolerations tests if given tolerations have valid data. func ValidateTolerations(tolerations []api.Toleration, fldPath *field.Path) field.ErrorList { allErrors := field.ErrorList{} @@ -2112,6 +2125,10 @@ func ValidatePodSpec(spec *api.PodSpec, fldPath *field.Path) field.ErrorList { allErrs = append(allErrs, ValidateTolerations(spec.Tolerations, fldPath.Child("tolerations"))...) } + if len(spec.HostAliases) > 0 { + allErrs = append(allErrs, ValidateHostAliases(spec.HostAliases, fldPath.Child("hostAliases"))...) + } + return allErrs } diff --git a/pkg/api/validation/validation_test.go b/pkg/api/validation/validation_test.go index <HASH>..<HASH> 100644 --- a/pkg/api/validation/validation_test.go +++ b/pkg/api/validation/validation_test.go @@ -3202,6 +3202,13 @@ func TestValidatePodSpec(t *testing.T) { RestartPolicy: api.RestartPolicyAlways, DNSPolicy: api.DNSClusterFirst, }, + { // Populate HostAliases. + HostAliases: []api.HostAlias{{IP: "12.34.56.78", Hostnames: []string{"host1", "host2"}}}, + Volumes: []api.Volume{{Name: "vol", VolumeSource: api.VolumeSource{EmptyDir: &api.EmptyDirVolumeSource{}}}}, + Containers: []api.Container{{Name: "ctr", Image: "image", ImagePullPolicy: "IfNotPresent", TerminationMessagePolicy: "File"}}, + RestartPolicy: api.RestartPolicyAlways, + DNSPolicy: api.DNSClusterFirst, + }, } for i := range successCases { if errs := ValidatePodSpec(&successCases[i], field.NewPath("field")); len(errs) != 0 { @@ -3268,6 +3275,18 @@ func TestValidatePodSpec(t *testing.T) { }, HostAliases: []api.HostAlias{{IP: "12.34.56.78", Hostnames: []string{"host1", "host2"}}}, }, + "with hostAliases with invalid IP": { + SecurityContext: &api.PodSecurityContext{ + HostNetwork: false, + }, + HostAliases: []api.HostAlias{{IP: "999.999.999.999", Hostnames: []string{"host1", "host2"}}}, + }, + "with hostAliases with invalid hostname": { + SecurityContext: &api.PodSecurityContext{ + HostNetwork: false, + }, + HostAliases: []api.HostAlias{{IP: "12.34.56.78", Hostnames: []string{"@#$^#@#$"}}}, + }, "bad supplementalGroups large than math.MaxInt32": { Containers: []api.Container{{Name: "ctr", Image: "image", ImagePullPolicy: "IfNotPresent", TerminationMessagePolicy: "File"}}, SecurityContext: &api.PodSecurityContext{
validate that HostAliases have valid IPs and hostnames
kubernetes_kubernetes
train
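The validator above accumulates one error per invalid field instead of failing fast, which is the ErrorList idiom throughout that file. A self-contained Python version using the standard `ipaddress` module and a DNS-1123 label pattern:

```python
import ipaddress
import re

DNS1123_LABEL = re.compile(r"^[a-z0-9]([-a-z0-9]{0,61}[a-z0-9])?$")

def validate_host_aliases(host_aliases):
    """Collect one error per bad field rather than stopping at the
    first invalid entry."""
    errors = []
    for i, alias in enumerate(host_aliases):
        try:
            ipaddress.ip_address(alias["ip"])
        except ValueError:
            errors.append(f"hostAliases[{i}].ip: must be a valid IP address")
        for hostname in alias["hostnames"]:
            if not DNS1123_LABEL.match(hostname):
                errors.append(f"hostAliases[{i}].hostnames: "
                              f"{hostname!r} is not a DNS-1123 label")
    return errors

bad = [{"ip": "999.999.999.999", "hostnames": ["@#$"]}]
assert len(validate_host_aliases(bad)) == 2  # one error per invalid field
```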
4bae0c92285de0be16729411ae5f864cddb31285
diff --git a/python/ray/util/client/__init__.py b/python/ray/util/client/__init__.py index <HASH>..<HASH> 100644 --- a/python/ray/util/client/__init__.py +++ b/python/ray/util/client/__init__.py @@ -1,5 +1,6 @@ from typing import List, Tuple, Dict, Any +import os import sys import logging @@ -81,7 +82,7 @@ class RayAPIStub: msg = "Python minor versions differ between client and server:" + \ f" client is {version_str}," + \ f" server is {conn_info['python_version']}" - if ignore_version: + if ignore_version or "RAY_IGNORE_VERSION_MISMATCH" in os.environ: logger.warning(msg) else: raise RuntimeError(msg) @@ -89,7 +90,7 @@ class RayAPIStub: msg = "Client Ray installation incompatible with server:" + \ f" client is {CURRENT_PROTOCOL_VERSION}," + \ f" server is {conn_info['protocol_version']}" - if ignore_version: + if ignore_version or "RAY_IGNORE_VERSION_MISMATCH" in os.environ: logger.warning(msg) else: raise RuntimeError(msg) diff --git a/python/ray/util/client_connect.py b/python/ray/util/client_connect.py index <HASH>..<HASH> 100644 --- a/python/ray/util/client_connect.py +++ b/python/ray/util/client_connect.py @@ -9,7 +9,9 @@ from typing import List, Tuple, Dict, Any def connect(conn_str: str, secure: bool = False, metadata: List[Tuple[str, str]] = None, - connection_retries: int = 3) -> Dict[str, Any]: + connection_retries: int = 3, + *, + ignore_version: bool = False) -> Dict[str, Any]: if ray.is_connected(): raise RuntimeError("Ray Client is already connected. " "Maybe you called ray.util.connect twice by " @@ -23,7 +25,11 @@ def connect(conn_str: str, # for supporting things like cert_path, ca_path, etc and creating # the correct metadata return ray.connect( - conn_str, secure=secure, metadata=metadata, connection_retries=3) + conn_str, + secure=secure, + metadata=metadata, + connection_retries=3, + ignore_version=ignore_version) def disconnect():
[client] Allow ignoring version mismatch with env var for debugging (#<I>)
ray-project_ray
train
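The change above adds an environment variable as a second way to downgrade the version-mismatch error to a warning, useful when you cannot edit the `connect()` call site. Boiled down to the essential check (the variable name mirrors the commit):

```python
import os

def check_versions(client, server, ignore_version=False):
    """An env var acts as a global escape hatch alongside the
    explicit flag, downgrading a mismatch to a warning."""
    if client == server:
        return
    msg = f"version mismatch: client {client}, server {server}"
    if ignore_version or "RAY_IGNORE_VERSION_MISMATCH" in os.environ:
        print(f"warning: {msg}")
    else:
        raise RuntimeError(msg)

check_versions("3.8", "3.8")  # matching versions pass silently
```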
d71a2975d43d5a456f9127c74f0f5de7995ac1b1
diff --git a/gitlfs-server/src/main/java/ru/bozaro/gitlfs/server/LockManager.java b/gitlfs-server/src/main/java/ru/bozaro/gitlfs/server/LockManager.java index <HASH>..<HASH> 100644 --- a/gitlfs-server/src/main/java/ru/bozaro/gitlfs/server/LockManager.java +++ b/gitlfs-server/src/main/java/ru/bozaro/gitlfs/server/LockManager.java @@ -21,17 +21,17 @@ public interface LockManager { interface LockRead { @NotNull - List<Lock> getLocks(@Nullable String path, @Nullable String lockId, @Nullable Ref ref); + List<Lock> getLocks(@Nullable String path, @Nullable String lockId, @Nullable Ref ref) throws IOException; } interface LockWrite extends LockRead { @NotNull - Lock lock(@NotNull String path, @Nullable Ref ref) throws LockConflictException; + Lock lock(@NotNull String path, @Nullable Ref ref) throws LockConflictException, IOException; @Nullable - Lock unlock(@NotNull String lockId, boolean force, @Nullable Ref ref) throws LockConflictException; + Lock unlock(@NotNull String lockId, boolean force, @Nullable Ref ref) throws LockConflictException, IOException; @NotNull - VerifyLocksResult verifyLocks(@Nullable Ref ref); + VerifyLocksResult verifyLocks(@Nullable Ref ref) throws IOException; } } diff --git a/gitlfs-server/src/main/java/ru/bozaro/gitlfs/server/LocksServlet.java b/gitlfs-server/src/main/java/ru/bozaro/gitlfs/server/LocksServlet.java index <HASH>..<HASH> 100644 --- a/gitlfs-server/src/main/java/ru/bozaro/gitlfs/server/LocksServlet.java +++ b/gitlfs-server/src/main/java/ru/bozaro/gitlfs/server/LocksServlet.java @@ -99,7 +99,7 @@ public class LocksServlet extends HttpServlet { } @NotNull - private ResponseWriter listLocks(@NotNull HttpServletRequest req, @NotNull LockManager.LockRead lockRead) { + private ResponseWriter listLocks(@NotNull HttpServletRequest req, @NotNull LockManager.LockRead lockRead) throws IOException { final String refName = req.getParameter("refspec"); final String path = req.getParameter("path");
Allow LockManager to throw IOException on errors
bozaro_git-lfs-java
train
09be4ed93656cb2a4bdf160fd500de76348451b3
diff --git a/src/Composer/Repository/PlatformRepository.php b/src/Composer/Repository/PlatformRepository.php index <HASH>..<HASH> 100644 --- a/src/Composer/Repository/PlatformRepository.php +++ b/src/Composer/Repository/PlatformRepository.php @@ -90,7 +90,7 @@ class PlatformRepository extends ArrayRepository break; case 'uuid': - $prettyVersion = UUID_VERSION; + $prettyVersion = phpversion('uuid'); break; case 'xsl':
PECL-UUID does not define a version constant, so we should use phpversion() to fetch the required information
mothership-ec_composer
train
12c66e92b1f37c11c69da8d3718e4f988f400c6c
diff --git a/config/playroom/makePlayroomConfig.js b/config/playroom/makePlayroomConfig.js index <HASH>..<HASH> 100644 --- a/config/playroom/makePlayroomConfig.js +++ b/config/playroom/makePlayroomConfig.js @@ -38,5 +38,8 @@ module.exports = () => ({ module: clientWebpackConfig.module, resolve: clientWebpackConfig.resolve, plugins: clientWebpackConfig.plugins, + optimization: { + concatenateModules: false, + }, }), });
fix(playroom): Update webpack config (#<I>)
seek-oss_sku
train