hash stringlengths 40 40 | diff stringlengths 131 26.7k | message stringlengths 7 694 | project stringlengths 5 67 | split stringclasses 1
value | diff_languages stringlengths 2 24 |
|---|---|---|---|---|---|
6a1648ec67fd41c8c64a51f00ba195d609393076 | diff --git a/src/nls/de/strings.js b/src/nls/de/strings.js
index <HASH>..<HASH> 100644
--- a/src/nls/de/strings.js
+++ b/src/nls/de/strings.js
@@ -288,7 +288,7 @@ define({
"ABOUT_TEXT_LINE3" : "Hinweise, Bestimmungen und Bedingungen, die sich auf Drittanbieter-Software beziehen, finden sich unter <a class=\"clickable-link\" data-href=\"http://www.adobe.com/go/thirdparty/\">http://www.adobe.com/go/thirdparty/</a> und sind hier durch Bezugnahme eingeschlossen.",
"ABOUT_TEXT_LINE4" : "Dokumentation und Quellcode unter <a class=\"clickable-link\" data-href=\"https://github.com/adobe/brackets/\">https://github.com/adobe/brackets/</a>",
"ABOUT_TEXT_LINE5" : "Gemacht mit \u2764 und JavaScript von:",
- "ABOUT_TEXT_LINE6" : "Vielen Leuten (aber wir haben gerade Probleme, diese Daten zu laden).",
+ "ABOUT_TEXT_LINE6" : "…vielen Leuten (…leider haben wir aber gerade Probleme, diese Daten zu laden).",
"UPDATE_NOTIFICATION_TOOLTIP" : "Eine neue Version von {APP_NAME} ist verfügbar! Für Details hier klicken.",
"UPDATE_AVAILABLE_TITLE" : "Update verfügbar",
"UPDATE_MESSAGE" : "Hallo! Eine neue Version von {APP_NAME} ist verfügbar. Hier einige der neuen Funktionen:", | Apply suggestion by @pthiess for ABOUT_TEXT_LINE6 in 'de' locale | adobe_brackets | train | js |
471e5f7412ec2d4f7a81aebba55a946bad49c346 | diff --git a/lib/gem/release/version.rb b/lib/gem/release/version.rb
index <HASH>..<HASH> 100644
--- a/lib/gem/release/version.rb
+++ b/lib/gem/release/version.rb
@@ -1,5 +1,5 @@
module Gem
module Release
- VERSION = '2.0.0.dev.4'
+ VERSION = '2.0.0.dev.5'
end
end | Bump to <I>.de<I> | svenfuchs_gem-release | train | rb |
94663424ae5ae9856b40a9f170762b4197024661 | diff --git a/sysfs/net_class.go b/sysfs/net_class.go
index <HASH>..<HASH> 100644
--- a/sysfs/net_class.go
+++ b/sysfs/net_class.go
@@ -80,7 +80,7 @@ func (fs FS) NewNetClass() (NetClass, error) {
netClass := NetClass{}
for _, deviceDir := range devices {
- if !deviceDir.IsDir() {
+ if deviceDir.Mode().IsRegular() {
continue
}
interfaceClass, err := netClass.parseNetClassIface(path + "/" + deviceDir.Name()) | sysfs/nettclass: Ignore regular files only
Symlinks are acceptable. Resolves #<I>. | prometheus_procfs | train | go |
c40b0b232f4b04102b8220ced99cc4ae638a65df | diff --git a/flink-python/setup.py b/flink-python/setup.py
index <HASH>..<HASH> 100644
--- a/flink-python/setup.py
+++ b/flink-python/setup.py
@@ -224,7 +224,7 @@ run sdist.
author_email='dev@flink.apache.org',
python_requires='>=3.5',
install_requires=['py4j==0.10.8.1', 'python-dateutil==2.8.0', 'apache-beam==2.19.0',
- 'cloudpickle==1.2.2'],
+ 'cloudpickle==1.2.2', 'avro-python3>=1.8.1,<=1.9.1'],
tests_require=['pytest==4.4.1'],
description='Apache Flink Python API',
long_description=long_description, | [FLINK-<I>][python] Limit the version of avro-python3
This closes #<I>. | apache_flink | train | py |
1412d3d8748585f422eb946a06bfe903160d3b01 | diff --git a/tests/run_command.py b/tests/run_command.py
index <HASH>..<HASH> 100644
--- a/tests/run_command.py
+++ b/tests/run_command.py
@@ -1,5 +1,6 @@
import os
import subprocess
+import sys
from trashcli import base_dir
@@ -17,7 +18,7 @@ def run_command(cwd, command, args=None, input='', env=None):
if args == None:
args = []
command_full_path = os.path.join(base_dir, command)
- process = subprocess.Popen(["python", command_full_path] + args,
+ process = subprocess.Popen([sys.executable, command_full_path] + args,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE, | Now integration tests should run also on Debian system where the only available Python executable is python3 | andreafrancia_trash-cli | train | py |
c6a72cd39075e63b685fb4ae5900b0071c47b965 | diff --git a/symbionts/pressbooks-latex/pb-latex-admin.php b/symbionts/pressbooks-latex/pb-latex-admin.php
index <HASH>..<HASH> 100644
--- a/symbionts/pressbooks-latex/pb-latex-admin.php
+++ b/symbionts/pressbooks-latex/pb-latex-admin.php
@@ -18,7 +18,9 @@ class PBLatexAdmin extends PBLatex {
// since we're activating at the network level, this needs to be called in the constructor
$this->addOptions();
- add_action( 'admin_menu', array( &$this, 'adminMenu' ) );
+ if ( \Pressbooks\Book::isBook() ) {
+ add_action( 'admin_menu', [ &$this, 'adminMenu' ] );
+ }
}
function adminMenu() { | PB Latex settings should only appear in books (fixes #<I>) (#<I>) | pressbooks_pressbooks | train | php |
259cdfa0defd278e449a235258746089f1ed3fcb | diff --git a/python/ray/services.py b/python/ray/services.py
index <HASH>..<HASH> 100644
--- a/python/ray/services.py
+++ b/python/ray/services.py
@@ -1563,7 +1563,7 @@ def start_raylet_monitor(redis_address,
"--config_list={}".format(config_str),
]
if redis_password:
- command += [redis_password]
+ command += ["--redis_password={}".format(redis_password)]
process_info = start_ray_process(
command,
ray_constants.PROCESS_TYPE_RAYLET_MONITOR, | Fix issue when starting `raylet_monitor` (#<I>) | ray-project_ray | train | py |
7c2758d6b69a6ff0899bcf9589364dec00426864 | diff --git a/src/org/ddogleg/struct/DogLinkedList.java b/src/org/ddogleg/struct/DogLinkedList.java
index <HASH>..<HASH> 100644
--- a/src/org/ddogleg/struct/DogLinkedList.java
+++ b/src/org/ddogleg/struct/DogLinkedList.java
@@ -33,14 +33,14 @@ import java.util.Objects;
*/
public class DogLinkedList<T> {
// first element in the list
- @Nullable Element<T> first;
+ protected @Nullable Element<T> first;
// last element in the list
- @Nullable Element<T> last;
+ protected @Nullable Element<T> last;
// total number of elements in the list
- int size;
+ protected int size;
// recycled elements. It is assumed that all elements inside of here have all parameters set to null already
- final ArrayDeque<Element<T>> available = new ArrayDeque<>();
+ protected final ArrayDeque<Element<T>> available = new ArrayDeque<>();
/**
* Puts the linked list back into its initial state. Elements are saved for later use. | DogLinkedList
- Changed fields from package protected to protected | lessthanoptimal_ddogleg | train | java |
4265c9c515ebf24e24083e160713630f5aa08a3f | diff --git a/py2pack/__init__.py b/py2pack/__init__.py
index <HASH>..<HASH> 100644
--- a/py2pack/__init__.py
+++ b/py2pack/__init__.py
@@ -1,5 +1,5 @@
__doc__ = 'Generate distribution packages from Python packages on PyPI'
__author__ = 'Sascha Peilicke <saschpe@gmx.de>'
-__version__ = '0.2.4'
+__version__ = '0.2.5'
from py2pack import list, search, fetch, generate, main | Bump the version number to '<I>'. | openSUSE_py2pack | train | py |
75f2716d033e4d11a10243b70618bfae64595bee | diff --git a/src/ossos-pipeline/ossos/gui/models/workload.py b/src/ossos-pipeline/ossos/gui/models/workload.py
index <HASH>..<HASH> 100644
--- a/src/ossos-pipeline/ossos/gui/models/workload.py
+++ b/src/ossos-pipeline/ossos/gui/models/workload.py
@@ -1,5 +1,6 @@
from glob import glob
import re
+from astropy import units
__author__ = "David Rusk <drusk@uvic.ca>"
@@ -16,7 +17,8 @@ from .exceptions import (NoAvailableWorkException, SourceNotNamedException)
from ..progress import FileLockedException
-from ...astrom import StreamingAstromWriter, Source
+from ...astrom import StreamingAstromWriter, Source, SourceReading
+from ...mpc import Observation
from ...orbfit import Orbfit
@@ -396,6 +398,10 @@ class TracksWorkUnit(WorkUnit):
return self.builder.build_workunit(mpc_filename)
def save(self):
+ """
+ Update the SouceReading information for the currently recorded observations and then flush those to a file.
+ @return: mpc_filename of the resulting save.
+ """
self.get_writer().flush()
mpc_filename = self.output_context.get_full_path(self.get_writer().get_filename())
self.get_writer().close() | added a return type to a method so other parts of the code can check that they are getting the kind of variable they want.
PyEphem will catch bad return type matching, but only if the comments of the method give some hints. | OSSOS_MOP | train | py |
30a11ddb653d2e63714a1b1544b83cb3314ecb98 | diff --git a/src/com/jfoenix/controls/JFXDialog.java b/src/com/jfoenix/controls/JFXDialog.java
index <HASH>..<HASH> 100644
--- a/src/com/jfoenix/controls/JFXDialog.java
+++ b/src/com/jfoenix/controls/JFXDialog.java
@@ -57,7 +57,8 @@ import com.jfoenix.transitions.CachedTransition;
/**
* @author Shadi Shaheen
- *
+ * note that for JFXDialog to work properly the root node should
+ * be of type {@link StackPane}
*/
@DefaultProperty(value="content")
public class JFXDialog extends StackPane { | Note that JFXDialog requires the root to be stack pane to work properly | jfoenixadmin_JFoenix | train | java |
a5192ac805ec2ae711b8ab1b342c2cc217258764 | diff --git a/src/org/openscience/cdk/renderer/MoleculeViewer2D.java b/src/org/openscience/cdk/renderer/MoleculeViewer2D.java
index <HASH>..<HASH> 100644
--- a/src/org/openscience/cdk/renderer/MoleculeViewer2D.java
+++ b/src/org/openscience/cdk/renderer/MoleculeViewer2D.java
@@ -174,7 +174,7 @@ public class MoleculeViewer2D extends JPanel implements CDKChangeListener
{
StructureDiagramGenerator sdg = new StructureDiagramGenerator();
MoleculeViewer2D mv = new MoleculeViewer2D();
- mv.getFrame().setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
+ mv.getFrame().setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE);
Renderer2DModel r2dm = mv.getRenderer2DModel();
r2dm.setDrawNumbers(true); | Tried to fix exit method - didn't work
git-svn-id: <URL> | cdk_cdk | train | java |
ff3bdb2d7e960d560c493ed0d634a3d06286c2c2 | diff --git a/lib/term_buffer.js b/lib/term_buffer.js
index <HASH>..<HASH> 100644
--- a/lib/term_buffer.js
+++ b/lib/term_buffer.js
@@ -505,6 +505,13 @@ TermBuffer.prototype.scroll = function(dir, lines) {
}
};
+TermBuffer.prototype.write = function(write, encoding) {
+ if(this._writer === undefined) {
+ console.warn("TermBuffer.write is deprecated. Use TermWriter.write insted!");
+ this._writer = new (require("./term_writer.js"))(this);
+ }
+ return this._writer.write.apply(this._writer, arguments);
+}
// Generates a diff between this.term and OtherTerm
// if this diff is applied to this.term it results in the same as OtherTerm | Add compatibility write function to TermBuffer | Gottox_terminal.js | train | js |
8fec3649217b9e1b56af8bfb6de0e0fb5430c93d | diff --git a/modules/core/src/main/java/org/torquebox/core/pool/SharedPool.java b/modules/core/src/main/java/org/torquebox/core/pool/SharedPool.java
index <HASH>..<HASH> 100644
--- a/modules/core/src/main/java/org/torquebox/core/pool/SharedPool.java
+++ b/modules/core/src/main/java/org/torquebox/core/pool/SharedPool.java
@@ -20,9 +20,9 @@
package org.torquebox.core.pool;
import java.util.ArrayList;
-import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.WeakHashMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.atomic.AtomicInteger;
@@ -290,7 +290,7 @@ public class SharedPool<T> implements Pool<T> {
/** The previous shared instances. */
private List<T> previousInstances = new ArrayList<T>();
- private Map<T, AtomicInteger> instanceCounts = new HashMap<T, AtomicInteger>();
+ private Map<T, AtomicInteger> instanceCounts = new WeakHashMap<T, AtomicInteger>();
/** Optional factory to create the initial instance. */
private InstanceFactory<T> factory; | Remove another potential runtime memory leak from zero-downtime implementation (TORQUE-<I>) | torquebox_torquebox | train | java |
a8ce3fcc07ef435f95b99e58838b26f92401e388 | diff --git a/src/js/player.js b/src/js/player.js
index <HASH>..<HASH> 100644
--- a/src/js/player.js
+++ b/src/js/player.js
@@ -906,8 +906,11 @@ class MediaElementPlayer {
// Fixing an Android stock browser bug, where "seeked" isn't fired correctly after
// ending the video and jumping to the beginning
setTimeout(() => {
- t.container.querySelector(`.${t.options.classPrefix}overlay-loading`)
- .parentNode.style.display = 'none';
+ const loadingElement = t.container
+ .querySelector(`.${t.options.classPrefix}overlay-loading`);
+ if (loadingElement && loadingElement.parentNode) {
+ loadingElement.parentNode.style.display = 'none';
+ }
}, 20);
} catch (exp) {
console.log(exp);
@@ -1906,7 +1909,7 @@ class MediaElementPlayer {
node.style.display = '';
t.container.parentNode.insertBefore(node, t.container);
t.node.remove();
-
+
// Add children
if (t.mediaFiles) {
for (let i = 0, total = t.mediaFiles.length; i < total; i++) { | Remove loading overlay only if it exists instead of failing | mediaelement_mediaelement | train | js |
77cfd6f41e6e2fed31ce489785cb3534ef10d043 | diff --git a/Task/Mysql/DbFindReplace.php b/Task/Mysql/DbFindReplace.php
index <HASH>..<HASH> 100644
--- a/Task/Mysql/DbFindReplace.php
+++ b/Task/Mysql/DbFindReplace.php
@@ -8,7 +8,7 @@ class DbFindReplace extends \Qobo\Robo\AbstractCmdTask
* {@inheritdoc}
*/
protected $data = [
- 'cmd' => './vendor/interconnectit/search-replace-db/srdb.cli.php -h %%HOST%% %%PORT%% -u %%USER%% -p %%PASS%% -n %%DB%% -s %%SEARCH%% -r %%REPLACE%%',
+ 'cmd' => './vendor/bin/srdb.cli.php -h %%HOST%% %%PORT%% -u %%USER%% -p %%PASS%% -n %%DB%% -s %%SEARCH%% -r %%REPLACE%%',
'path' => ['./'],
'host' => 'localhost',
'user' => 'root', | Updated phake-builder (task #<I>)
* Updated phake-builder to v4 as it fixes find-replace issues
* Adjusted DbFindReplace task to utilize vendor/bin for mysql
find-replace instead of direct path to the script in vendor | QoboLtd_qobo-robo | train | php |
085d802a7d55ad12fc429db9626628b69e2ddd6f | diff --git a/src/expressHelpers.js b/src/expressHelpers.js
index <HASH>..<HASH> 100644
--- a/src/expressHelpers.js
+++ b/src/expressHelpers.js
@@ -4,24 +4,17 @@ import {context} from './context'
import {createChannel} from './messaging'
import onHeaders from 'on-headers'
-const appToChan = new WeakMap()
const middlewares = new WeakMap()
+const channel = createChannel()
+
export function register(app, verb, pattern, reqHandler) {
- if (!appToChan.has(app)) {
- appToChan.set(app, createChannel())
- }
- const reqChannel = appToChan.get(app)
app[verb](pattern, (req, res, next) => {
- reqChannel.put([req, res, next, reqHandler])
+ channel.put([req, res, next, reqHandler])
})
}
-export function* runApp(app) {
- if (!appToChan.has(app)) {
- throw new Error('you should register at least one handler before running')
- }
- const channel = appToChan.get(app)
+export function* runApp() {
while (true) {
const [req, res, next, reqHandler] = yield channel.take() | Change expressHelpers API: one runApp run is needed
There is now a single channel shared by all registered handlers, runApp
listens to this channel | vacuumlabs_yacol | train | js |
06c7cc9149936a72c3b648841ff9eecc7d75718e | diff --git a/OpenSSL/test/test_ssl.py b/OpenSSL/test/test_ssl.py
index <HASH>..<HASH> 100644
--- a/OpenSSL/test/test_ssl.py
+++ b/OpenSSL/test/test_ssl.py
@@ -1340,24 +1340,7 @@ class MemoryBIOTests(TestCase, _LoopbackMixin):
code, as no memory BIO is involved here). Even though this isn't a
memory BIO test, it's convenient to have it here.
"""
- (server, client) = socket_pair()
-
- # Let the encryption begin...
- client_conn = self._client(client)
- server_conn = self._server(server)
-
- # Establish the connection
- established = False
- while not established:
- established = True # assume the best
- for ssl in client_conn, server_conn:
- try:
- # Generally a recv() or send() could also work instead
- # of do_handshake(), and we would stop on the first
- # non-exception.
- ssl.do_handshake()
- except WantReadError:
- established = False
+ server_conn, client_conn = self._loopback()
important_message = b("Help me Obi Wan Kenobi, you're my only hope.")
client_conn.send(important_message) | Switch to the loopback setup helper in test_socketConnect, incidentally converting the connections to blocking, which avoids the OS X recv error. | pyca_pyopenssl | train | py |
831c3de6cbd39cc89d5ae4b008080dd0af8041fa | diff --git a/pnc_cli/bpmbuildconfigurations.py b/pnc_cli/bpmbuildconfigurations.py
index <HASH>..<HASH> 100644
--- a/pnc_cli/bpmbuildconfigurations.py
+++ b/pnc_cli/bpmbuildconfigurations.py
@@ -49,8 +49,10 @@ def create_build_configuration_process(repository, revision, **kwargs):
if not kwargs.get("generic_parameters"):
kwargs["generic_parameters"] = {}
- kwargs["project"] = projects_api.get_specific(kwargs.get("project_id")).content
- kwargs["environment"] = envs_api.get_specific(kwargs.get("build_environment_id")).content
+ if not kwargs.get("project"):
+ kwargs["project"] = projects_api.get_specific(kwargs.get("project_id")).content
+ if not kwargs.get("environment"):
+ kwargs["environment"] = envs_api.get_specific(kwargs.get("build_environment_id")).content
build_configuration = create_build_conf_object(scm_revision=revision, **kwargs)
repo_creation = swagger_client.RepositoryCreationUrlAutoRest() | fix: get fields by id in BPM BC create only when needed | project-ncl_pnc-cli | train | py |
1ab9eb82ec59080bd0ad4e79a1e6136113d5458b | diff --git a/contrib/backporting/set-labels.py b/contrib/backporting/set-labels.py
index <HASH>..<HASH> 100755
--- a/contrib/backporting/set-labels.py
+++ b/contrib/backporting/set-labels.py
@@ -11,7 +11,11 @@ import argparse
import os
import sys
-from github import Github
+try:
+ from github import Github
+except ImportError:
+ print("pygithub not found you can install it by running 'pip3 install --user PyGithub'")
+ sys.exit(-1)
parser = argparse.ArgumentParser()
parser.add_argument('pr_number', type=int) | contrib/backporting: print helper message how to install missing library
Fixes: 8ae<I>d<I>be1b ("Add label script for backporting") | cilium_cilium | train | py |
60b5bf5ad2b82647339697b2b2a1becb4f0040f5 | diff --git a/core/DataTable/Renderer/Csv.php b/core/DataTable/Renderer/Csv.php
index <HASH>..<HASH> 100644
--- a/core/DataTable/Renderer/Csv.php
+++ b/core/DataTable/Renderer/Csv.php
@@ -239,6 +239,8 @@ class Csv extends Renderer
$value = $this->formatFormulas($value);
+ $value = str_replace(["\t"], ' ', $value);
+
if (is_string($value)
&& (strpos($value, '"') !== false
|| strpos($value, $this->separator) !== false) | Improve the sanitization of request parameters (#<I>)
* Improve the sanitization of request parameters by replacing tab characters with spaces
* Adjust scope of value sanitization
* Fix commented line mistake | matomo-org_matomo | train | php |
180961755330b7276ca31d04d2c39391d2f2079c | diff --git a/lib/mumble-ruby/user.rb b/lib/mumble-ruby/user.rb
index <HASH>..<HASH> 100644
--- a/lib/mumble-ruby/user.rb
+++ b/lib/mumble-ruby/user.rb
@@ -12,6 +12,13 @@ module Mumble
attribute :self_mute
attribute :self_deaf
+ def initialize(client, data)
+ super(client, data)
+ if channel_id.nil?
+ self.update({"channel_id" => 0})
+ end
+ end
+
def current_channel
client.channels[channel_id]
end | Set channel_id to 0 for users in root channel | mattvperry_mumble-ruby | train | rb |
a34cf18ca24a496de8c8bda5ca25aafcce9d4bbb | diff --git a/src/Mapper.php b/src/Mapper.php
index <HASH>..<HASH> 100644
--- a/src/Mapper.php
+++ b/src/Mapper.php
@@ -39,6 +39,16 @@ class Mapper
}
/**
+ * Getter for all properties
+ * @param string $name name of property to retrieve
+ * @return mixed value of property
+ */
+ public function __get($name)
+ {
+ return $this->$name;
+ }
+
+ /**
* Add models namespace to type, if it is not null
* @param string $type type name
* @return string type name prepended with models namespace if it is not null
diff --git a/test/MapperTest.php b/test/MapperTest.php
index <HASH>..<HASH> 100644
--- a/test/MapperTest.php
+++ b/test/MapperTest.php
@@ -84,6 +84,11 @@ class MapperTest extends AbstractTestCase
$this->assertNull($user);
}
+ public function testMagicGet()
+ {
+ $this->assertInstanceOf('MongoDB', $this->mapper->mongodb);
+ }
+
public function testFetchObjects()
{
$users = $this->mapper->fetchObjects('users', 'User', ['type' => User::TYPE_ADMIN]); | Add __get method to Mapper class to allow getting object properties | dintel_mongo-object | train | php,php |
aeacbc2eabff230887cae7413c060d231d70bc5e | diff --git a/spec/main-spec.js b/spec/main-spec.js
index <HASH>..<HASH> 100644
--- a/spec/main-spec.js
+++ b/spec/main-spec.js
@@ -8,6 +8,8 @@ import { fixtureFilename } from './utils';
describe('Atom Grammar Test Jasmine', () => {
+ beforeEach(() => atom.config.set('core.useTreeSitterParsers', false));
+
describe('C Syntax Assertions', () => {
beforeEach(() => waitsForPromise(() => atom.packages.activatePackage('language-c')));
grammarTest(fixtureFilename('C/syntax_test_c_example.c')); | Disable tree-sitter for tests
atom-grammar-test doesn't currently work with tree-sitter since they
don't expose tokens in `tokenizeLine`
<URL> | kevinastone_atom-grammar-test | train | js |
83ad596c4ec8eccf37c0e288f96b46f64a5501a3 | diff --git a/lib/plucky/query.rb b/lib/plucky/query.rb
index <HASH>..<HASH> 100644
--- a/lib/plucky/query.rb
+++ b/lib/plucky/query.rb
@@ -14,7 +14,7 @@ module Plucky
attr_reader :criteria, :options, :collection
def_delegator :criteria, :simple?
def_delegator :options, :fields?
- def_delegators :to_a, :each, :include?
+ def_delegators :to_a, :include?
def initialize(collection, opts={})
@collection, @options, @criteria = collection, OptionsHash.new, CriteriaHash.new
@@ -84,6 +84,10 @@ module Plucky
clone.update(opts).reverse.find_one
end
+ def each
+ find_each.each { |doc| yield(doc) }
+ end
+
def remove(opts={})
query = clone.update(opts)
query.collection.remove(query.criteria.to_hash)
@@ -157,7 +161,7 @@ module Plucky
alias :exist? :exists?
def to_a
- all
+ find_each.to_a
end
def [](key) | Optimize each and to_a to use find_each instead of all.
all loads every doc into memory, find_each creates a cursor and loads as you keep reqeusting. | mongomapper_plucky | train | rb |
ba679b4999ec805c699c50bac318230c258e0448 | diff --git a/src/fa/passwords.php b/src/fa/passwords.php
index <HASH>..<HASH> 100644
--- a/src/fa/passwords.php
+++ b/src/fa/passwords.php
@@ -17,4 +17,5 @@ return [
'sent' => 'لینک بازگردانی گذرواژه به ایمیل شما ارسال شد.',
'token' => 'مشخصهی بازگردانی گذرواژه معتبر نیست.',
'user' => 'ما کاربری با این نشانی ایمیل نداریم!',
+ 'throttled' => 'پیش از تلاش مجدد کمی صبر کنید.',
]; | Update passwords.php
Add new vars for <I>. | caouecs_Laravel-lang | train | php |
0580574d55a89233c473cbe653ea57c7d594904c | diff --git a/lib/nodes/ObjectExpression.js b/lib/nodes/ObjectExpression.js
index <HASH>..<HASH> 100644
--- a/lib/nodes/ObjectExpression.js
+++ b/lib/nodes/ObjectExpression.js
@@ -55,7 +55,7 @@ var ObjectExpression = module.exports = Base.extend({
// As we don't keep reference to the parent, just update properties so the object stay
// the same reference.
- delete this.node.property;
+ delete this.node.properties;
delete this.node.type;
this.node.TEMP = false;
_.extend(this.node, val); | ObjectExpressions have a 'properties' not 'property' property | SBoudrias_AST-query | train | js |
e29ce5c2fa88100e3db9ad8b33e1fc809f5d1df7 | diff --git a/src/attributes.js b/src/attributes.js
index <HASH>..<HASH> 100644
--- a/src/attributes.js
+++ b/src/attributes.js
@@ -12,9 +12,31 @@ export class AttributesCustomAttribute {
}
valueChanged() {
- Object.keys(this.value).forEach(attribute => {
+ Object.keys(normalizeAtttibutes(this.value)).forEach(attribute => {
this.element.setAttribute(attribute, this.value[attribute]);
});
}
}
+
+/**
+ * @param {object|string|string[]} value
+ * @returns {object} where all the values are strings or boolean
+ */
+function normalizeAtttibutes(value, result = {}) {
+ if (typeof this.value === 'string') {
+ result[this.value] = true;
+
+ return result;
+ }
+
+ if (Array.isArray(this.value)) {
+ this.value.forEach(v => {
+ result = normalizeAtttibutes(v, result);
+ });
+
+ return result;
+ }
+
+ return result;
+} | feat(attributes): normalize attributes to an object
One can now pass a string, array of strings or an object with string or boolean
values to the attributes form-element property. | SpoonX_aurelia-form | train | js |
a3612723c20405d481624dcb9faf4b36c86b5935 | diff --git a/lib/laser/types/types.rb b/lib/laser/types/types.rb
index <HASH>..<HASH> 100644
--- a/lib/laser/types/types.rb
+++ b/lib/laser/types/types.rb
@@ -57,7 +57,7 @@ module Laser
def possible_classes
case variance
- when :invariant then SexpAnalysis::ClassRegistry[class_name]
+ when :invariant then [SexpAnalysis::ClassRegistry[class_name]]
when :covariant then SexpAnalysis::ClassRegistry[class_name].subset
when :contravariant then SexpAnalysis::ClassRegistry[class_name].superset
end | Whoops: invariants need to return a singleton list, not just the matching class. | michaeledgar_laser | train | rb |
1ed34bbc50eab74f5b5abbfcbebea71c9f1cb759 | diff --git a/core/src/main/java/org/xillium/core/conf/TextResource.java b/core/src/main/java/org/xillium/core/conf/TextResource.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/org/xillium/core/conf/TextResource.java
+++ b/core/src/main/java/org/xillium/core/conf/TextResource.java
@@ -6,7 +6,7 @@ package org.xillium.core.conf;
*/
public class TextResource {
public final String name;
- public String text;
+ public String text = ""; // an empty string can go into map, null can't
public TextResource(String n) {
name = n; | TextResource fixed to allow empty strings | brianwhu_xillium | train | java |
a15e05c326d69ba489c22fed6907511c8167aa00 | diff --git a/test/akismet_test.rb b/test/akismet_test.rb
index <HASH>..<HASH> 100644
--- a/test/akismet_test.rb
+++ b/test/akismet_test.rb
@@ -1,6 +1,6 @@
require 'test_helper'
-class AkismetTest < Test
+class AkismetTest < Minitest::Test
def setup
Akismet.api_key = API_KEY
diff --git a/test/client_test.rb b/test/client_test.rb
index <HASH>..<HASH> 100644
--- a/test/client_test.rb
+++ b/test/client_test.rb
@@ -1,7 +1,7 @@
require 'test_helper'
require 'date'
-class ClientTest < Test
+class ClientTest < Minitest::Test
APP_URL = 'http://example.com'
APP_NAME = 'Akismet tests'
diff --git a/test/test_helper.rb b/test/test_helper.rb
index <HASH>..<HASH> 100644
--- a/test/test_helper.rb
+++ b/test/test_helper.rb
@@ -3,4 +3,3 @@ require 'akismet'
API_KEY = ENV['AKISMET_API_KEY'] || raise("Set the AKISMET_API_KEY environment variable to an API key obtained from akismet.com")
-Test = defined?(Minitest::Test) ? Minitest::Test : MiniTest::Unit::TestCase | Always use Minitest::Test | jonahb_akismet | train | rb,rb,rb |
aa758e64f70ba69c819e76002bbcf0eb79c314e7 | diff --git a/pkg/api/types.go b/pkg/api/types.go
index <HASH>..<HASH> 100644
--- a/pkg/api/types.go
+++ b/pkg/api/types.go
@@ -1375,7 +1375,7 @@ type NodeAddress struct {
}
// NodeResources is an object for conveying resource information about a node.
-// see http://docs.k8s.io/resources.md for more details.
+// see http://docs.k8s.io/design/resources.md for more details.
type NodeResources struct {
// Capacity represents the available resources of a node
Capacity ResourceList `json:"capacity,omitempty"` | update the links in pkg/api/types.go | kubernetes_kubernetes | train | go |
fcaeef5c9e6eeae8679f613e00a0609819127c66 | diff --git a/ores/about.py b/ores/about.py
index <HASH>..<HASH> 100644
--- a/ores/about.py
+++ b/ores/about.py
@@ -1,5 +1,5 @@
__name__ = "ores"
-__version__ = "0.9.1"
+__version__ = "1.0.0"
__author__ = "Aaron Halfaker"
__author_email__ = "ahalfaker@wikimedia.org"
__description__ = "A webserver for hosting scorer models." | Increments version to <I> | wikimedia_ores | train | py |
f30325849acf4f8f26522441ac0321466687d731 | diff --git a/client/modules/crm/src/views/dashlets/activities.js b/client/modules/crm/src/views/dashlets/activities.js
index <HASH>..<HASH> 100644
--- a/client/modules/crm/src/views/dashlets/activities.js
+++ b/client/modules/crm/src/views/dashlets/activities.js
@@ -129,7 +129,7 @@ Espo.define('crm:views/dashlets/activities', ['views/dashlets/abstract/base', 'm
this.collection = new MultiCollection();
this.collection.seeds = this.seeds;
this.collection.url = 'Activities/action/listUpcoming';
- this.collection.maxSize = this.getConfig().get('recordsPerPageSmall') || 5;
+ this.collection.maxSize = this.getOption('displayRecords') || this.getConfig().get('recordsPerPageSmall') || 5;
this.collection.data.entityTypeList = this.scopeList;
this.listenToOnce(this.collection, 'sync', function () { | Activities Dashlet: fix Display Records option (#<I>) | espocrm_espocrm | train | js |
8728620421552c5407f240d4759ada6bf0ccc988 | diff --git a/tests/tabix_test.py b/tests/tabix_test.py
index <HASH>..<HASH> 100644
--- a/tests/tabix_test.py
+++ b/tests/tabix_test.py
@@ -78,6 +78,15 @@ class TestIndexing(unittest.TestCase):
pysam.tabix_index(self.tmpfilename, preset="gff")
self.assertTrue(checkBinaryEqual(self.tmpfilename + ".tbi", self.filename_idx))
+ def test_indexing_to_custom_location_works(self):
+ '''test indexing a file with a non-default location.'''
+
+ index_path = get_temp_filename(suffix='custom.tbi')
+ pysam.tabix_index(self.tmpfilename, preset="gff", index=index_path, force=True)
+ self.assertTrue(checkBinaryEqual(index_path, self.filename_idx))
+ os.unlink(index_path)
+
+
def test_indexing_with_explict_columns_works(self):
'''test indexing via preset.'''
@@ -101,7 +110,8 @@ class TestIndexing(unittest.TestCase):
def tearDown(self):
os.unlink(self.tmpfilename)
- os.unlink(self.tmpfilename + ".tbi")
+ if os.path.exists(self.tmpfilename + ".tbi"):
+ os.unlink(self.tmpfilename + ".tbi")
class TestCompression(unittest.TestCase): | Add test for indexing tabix file to custom location | pysam-developers_pysam | train | py |
afd6e55cf7fcc8e9377aa75666192b279075c5e8 | diff --git a/src/main/java/org/jgroups/protocols/kubernetes/KUBE_PING.java b/src/main/java/org/jgroups/protocols/kubernetes/KUBE_PING.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/jgroups/protocols/kubernetes/KUBE_PING.java
+++ b/src/main/java/org/jgroups/protocols/kubernetes/KUBE_PING.java
@@ -193,11 +193,6 @@ public class KUBE_PING extends Discovery {
|| System.getenv(property_name) != null;
}
- @Override public void destroy() {
- client=null;
- super.destroy();
- }
-
private PhysicalAddress getCurrentPhysicalAddress(Address addr) {
return (PhysicalAddress)down(new Event(Event.GET_PHYSICAL_ADDRESS, addr));
} | Resolves #<I>: Occasional NPE on shutdown: remove unnecessary nulling of fields in Protocol.destroy() | jgroups-extras_jgroups-kubernetes | train | java |
6cf1f1e75d53e6a6094c9880c5126b86e048e7b6 | diff --git a/okhttp-dnsoverhttps/src/test/java/okhttp3/dnsoverhttps/DohProviders.java b/okhttp-dnsoverhttps/src/test/java/okhttp3/dnsoverhttps/DohProviders.java
index <HASH>..<HASH> 100644
--- a/okhttp-dnsoverhttps/src/test/java/okhttp3/dnsoverhttps/DohProviders.java
+++ b/okhttp-dnsoverhttps/src/test/java/okhttp3/dnsoverhttps/DohProviders.java
@@ -106,8 +106,8 @@ public class DohProviders {
if (!workingOnly) {
//result.add(buildCleanBrowsing(client)); // timeouts
result.add(buildCryptoSx(client)); // 521 - server down
- result.add(buildChantra(client)); // 400
}
+ result.add(buildChantra(client));
return result;
} | Enable chantra for DNS over HTTPS testing | square_okhttp | train | java |
90abc538ece5f745eb7569b7bec43625f902d325 | diff --git a/client/server/pages/index.js b/client/server/pages/index.js
index <HASH>..<HASH> 100644
--- a/client/server/pages/index.js
+++ b/client/server/pages/index.js
@@ -631,7 +631,8 @@ export default function pages() {
app.get( '/plans', function ( req, res, next ) {
if ( ! req.context.isLoggedIn ) {
- const queryFor = req.query && req.query.for;
+ const queryFor = req.query?.for;
+
if ( queryFor && 'jetpack' === queryFor ) {
res.redirect(
'https://wordpress.com/wp-login.php?redirect_to=https%3A%2F%2Fwordpress.com%2Fplans'
@@ -639,9 +640,9 @@ export default function pages() {
} else if ( ! config.isEnabled( 'jetpack-cloud/connect' ) ) {
res.redirect( 'https://wordpress.com/pricing' );
}
- } else {
- next();
}
+
+ next();
} );
} | Fix `/plans` route not rendering in Jetpack cloud (#<I>) | Automattic_wp-calypso | train | js |
0b62513fa20ea3975a580e5b89dad265f54d6112 | diff --git a/src/main/java/org/junit/Assert.java b/src/main/java/org/junit/Assert.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/junit/Assert.java
+++ b/src/main/java/org/junit/Assert.java
@@ -127,13 +127,10 @@ public class Assert {
}
private static boolean equalsRegardingNull(Object expected, Object actual) {
- if (expected == null && actual == null)
- return true;
- if (expected != null && isEquals(expected, actual))
- return true;
+ if (expected == null)
+ return actual == null;
- return false;
-
+ return isEquals(expected, actual);
}
private static boolean isEquals(Object expected, Object actual) { | Simplifying isEqualsRegardingNull. | junit-team_junit4 | train | java |
d97f2ef3ec74d3bf7d486430781f9d248e070672 | diff --git a/lib/discordrb/data/channel.rb b/lib/discordrb/data/channel.rb
index <HASH>..<HASH> 100644
--- a/lib/discordrb/data/channel.rb
+++ b/lib/discordrb/data/channel.rb
@@ -785,7 +785,7 @@ module Discordrb
end
def update_channel_data(new_data)
- new_nsfw = new_data[:nsfw].is_a?(TrueClass) || new_data[:nsfw].is_a?(FalseClass) ? new_nsfw : @nsfw
+ new_nsfw = new_data[:nsfw].is_a?(TrueClass) || new_data[:nsfw].is_a?(FalseClass) ? new_data[:nsfw] : @nsfw
# send permission_overwrite only when explicitly set
overwrites = new_data[:permission_overwrites] ? new_data[:permission_overwrites].map { |_, v| v.to_hash } : nil
response = JSON.parse(API::Channel.update(@bot.token, @id, | Fix nsfw value not being passed to API call (#<I>) | meew0_discordrb | train | rb |
79864b88fd84f0558780243a8735d59c33521c4a | diff --git a/modules/client/src/main/java/org/jboss/wsf/stack/cxf/client/ServiceObjectFactory.java b/modules/client/src/main/java/org/jboss/wsf/stack/cxf/client/ServiceObjectFactory.java
index <HASH>..<HASH> 100644
--- a/modules/client/src/main/java/org/jboss/wsf/stack/cxf/client/ServiceObjectFactory.java
+++ b/modules/client/src/main/java/org/jboss/wsf/stack/cxf/client/ServiceObjectFactory.java
@@ -96,17 +96,17 @@ public class ServiceObjectFactory implements ObjectFactory
UnifiedServiceRefMetaData serviceRef = unmarshallServiceRef(ref);
Bus bus;
+ //Reset bus before constructing Service
+ BusFactory.setThreadDefaultBus(null);
URL cxfConfig = getCXFConfiguration(serviceRef.getVfsRoot());
if (cxfConfig != null)
{
SpringBusFactory busFactory = new SpringBusFactory();
bus = busFactory.createBus(cxfConfig);
- BusFactory.setDefaultBus(bus);
+ BusFactory.setThreadDefaultBus(bus);
}
else
{
- //Reset bus before constructing Service
- BusFactory.setThreadDefaultBus(null);
bus = BusFactory.getThreadDefaultBus();
} | [JBWS-<I>] Setting new bus read from configuration to thread local only, do not change the BusFactory static default bus | jbossws_jbossws-cxf | train | java |
79dc087aede6a36cd5e15d26110cc4fa2df1fe47 | diff --git a/client/lib/wpcom-undocumented/lib/undocumented.js b/client/lib/wpcom-undocumented/lib/undocumented.js
index <HASH>..<HASH> 100644
--- a/client/lib/wpcom-undocumented/lib/undocumented.js
+++ b/client/lib/wpcom-undocumented/lib/undocumented.js
@@ -2352,6 +2352,34 @@ Undocumented.prototype.transferStatus = function( siteId, transferId ) {
};
/**
+ * Update the poster for a video.
+ *
+ * @param {string} videoId ID of the video
+ * @param {object} data The POST data
+ * @param {Function} fn Function to invoke when request is complete
+ * @returns {Promise} A promise that resolves when the request is complete
+ */
+Undocumented.prototype.updateVideoPoster = function( videoId, data, fn ) {
+ debug( '/videos/:video_id/poster' );
+
+ const params = {
+ path: `/videos/${ videoId }/poster`,
+ };
+
+ if ( 'file' in data ) {
+ params.formData = [
+ [ 'poster', data.file ]
+ ];
+ }
+
+ if ( 'at_time' in data ) {
+ params.body = data;
+ }
+
+ return this.wpcom.req.post( params, fn );
+};
+
+/**
* Expose `Undocumented` module
*/
module.exports = Undocumented; | Add undocumented endpoint for updating video poster | Automattic_wp-calypso | train | js |
6b7f3229acd714bb1b7341e0d719e2167bbabbe5 | diff --git a/audio/ulaw/ulaw.go b/audio/ulaw/ulaw.go
index <HASH>..<HASH> 100644
--- a/audio/ulaw/ulaw.go
+++ b/audio/ulaw/ulaw.go
@@ -19,8 +19,12 @@ func WriteFileWavFromUlaw(filename string, ulawBytes []byte) error {
if err != nil {
return err
}
+ defer f.Close()
_, err = WriteWavFromUlaw(f, ulawBytes)
- return err
+ if err != nil {
+ return err
+ }
+ return f.Sync()
}
func WriteWavFromUlaw(w io.Writer, ulawBytes []byte) (n int, err error) { | enhance: audio/ulaw: update file close | grokify_gotilla | train | go |
ba61f06f09414af8dabd8431840818b46d2cf06a | diff --git a/smashrun/client.py b/smashrun/client.py
index <HASH>..<HASH> 100644
--- a/smashrun/client.py
+++ b/smashrun/client.py
@@ -223,7 +223,7 @@ class Smashrun(object):
return r
def _iter(self, url, count, cls=None, **kwargs):
- page = 0
+ page = None if count is None else 0
while True:
kwargs.update(count=count, page=page)
r = self.session.get(url, params=kwargs)
@@ -236,6 +236,8 @@ class Smashrun(object):
yield cls(d)
else:
yield d
+ if page is None:
+ return
page += 1
def _build_url(self, *args, **kwargs): | Add the ability to fetch all activities without paging | campbellr_smashrun-client | train | py |
c74a799ce1bdcc36d425d796c35767be9693240f | diff --git a/news-bundle/contao/classes/News.php b/news-bundle/contao/classes/News.php
index <HASH>..<HASH> 100644
--- a/news-bundle/contao/classes/News.php
+++ b/news-bundle/contao/classes/News.php
@@ -279,6 +279,12 @@ class News extends \Frontend
continue;
}
+ // The target page is exempt from the sitemap (see #6418)
+ if ($blnIsSitemap && $objParent->sitemap == 'map_never')
+ {
+ continue;
+ }
+
if ($objParent->domain != '')
{
$domain = (\Environment::get('ssl') ? 'https://' : 'http://') . $objParent->domain . TL_PATH . '/'; | [News] Do not add news and event URLs to the sitemap if the target page is exempt from the sitemap (see #<I>) | contao_contao | train | php |
e735ee56c28a15aaa8d1e61d0058256a6229f788 | diff --git a/src/index.js b/src/index.js
index <HASH>..<HASH> 100644
--- a/src/index.js
+++ b/src/index.js
@@ -20,8 +20,12 @@ const checkResponse = async response => {
const isOK =
!response.headers.has('x-status-code') ||
parseInt(response.headers.get('x-status-code'), 10) < 300;
- if (response.ok && isOK) return response.json();
- throw await response.json();
+ if (response.headers.get('content-type').indexOf('application/json') > -1) {
+ if (response.ok && isOK) return response.json();
+ throw await response.json();
+ }
+ if (response.ok && isOK) return response.text();
+ throw await response.text();
};
const doFetch = ( | Only parse JSON if response declares it to be JSON
If it's not JSON, return the body text. | hp-mobile_js-fetch | train | js |
f14b36c1c5f1c1a490fc15ca6769fd724f08e81a | diff --git a/spyder/plugins/plots/widgets/figurebrowser.py b/spyder/plugins/plots/widgets/figurebrowser.py
index <HASH>..<HASH> 100644
--- a/spyder/plugins/plots/widgets/figurebrowser.py
+++ b/spyder/plugins/plots/widgets/figurebrowser.py
@@ -212,7 +212,8 @@ class FigureBrowser(QWidget, SpyderWidgetMixin):
The available splitter width.
"""
min_sb_width = self.thumbnails_sb._min_scrollbar_width
- self.splitter.setSizes([base_width - min_sb_width, min_sb_width])
+ if base_width - min_sb_width > 0:
+ self.splitter.setSizes([base_width - min_sb_width, min_sb_width])
def show_fig_outline_in_viewer(self, state):
"""Draw a frame around the figure viewer if state is True.""" | Prevent setting negative sizes in the plots pane | spyder-ide_spyder | train | py |
3d2153f5d0dd0542f276946c07edba3a9f33c3c7 | diff --git a/src/PHPCoverFish/CoverFishScanCommand.php b/src/PHPCoverFish/CoverFishScanCommand.php
index <HASH>..<HASH> 100644
--- a/src/PHPCoverFish/CoverFishScanCommand.php
+++ b/src/PHPCoverFish/CoverFishScanCommand.php
@@ -196,11 +196,15 @@ class CoverFishScanCommand extends Command
);
try {
- $scanner = new CoverFishScanner($cliOptions, $outOptions);
+
+ $scanner = new CoverFishScanner($cliOptions, $outOptions, $output);
$scanner->analysePHPUnitFiles();
+
} catch (CoverFishFailExit $e) {
- die(CoverFishFailExit::RETURN_CODE_SCAN_FAIL);
+ return CoverFishFailExit::RETURN_CODE_SCAN_FAIL;
}
+
+ return 0;
}
/** | fix problem in exit code behaviour during scan fails and introduce output interface usage as standard out | dunkelfrosch_phpcoverfish | train | php |
c78e0516c19a72dd75aa258d55c1ede43b1ed9c2 | diff --git a/src/DeepCopy/DeepCopy.php b/src/DeepCopy/DeepCopy.php
index <HASH>..<HASH> 100644
--- a/src/DeepCopy/DeepCopy.php
+++ b/src/DeepCopy/DeepCopy.php
@@ -140,8 +140,8 @@ class DeepCopy
return $var;
}
- // PHP 8.1 Enum
- if (function_exists('enum_exists') && enum_exists($var::class)) {
+ // Enum
+ if (PHP_VERSION_ID >= 80100 && enum_exists($var::class)) {
return $var;
} | enum_exists may have been manually declared on lower PHP versions, use strict PHP version check | myclabs_DeepCopy | train | php |
14dd1a91a9e52ffdf782ee25cce59b41fcf9d0c8 | diff --git a/src/Symfony/Component/Routing/Loader/AnnotationDirectoryLoader.php b/src/Symfony/Component/Routing/Loader/AnnotationDirectoryLoader.php
index <HASH>..<HASH> 100644
--- a/src/Symfony/Component/Routing/Loader/AnnotationDirectoryLoader.php
+++ b/src/Symfony/Component/Routing/Loader/AnnotationDirectoryLoader.php
@@ -38,7 +38,12 @@ class AnnotationDirectoryLoader extends AnnotationFileLoader
$collection = new RouteCollection();
$collection->addResource(new DirectoryResource($dir, '/\.php$/'));
- foreach (new \RecursiveIteratorIterator(new \RecursiveDirectoryIterator($dir), \RecursiveIteratorIterator::LEAVES_ONLY) as $file) {
+ $files = iterator_to_array(new \RecursiveIteratorIterator(new \RecursiveDirectoryIterator($dir), \RecursiveIteratorIterator::LEAVES_ONLY));
+ usort($files, function (\SplFileInfo $a, \SplFileInfo $b) {
+ return (string) $a > (string) $b ? 1 : -1;
+ });
+
+ foreach ($files as $file) {
if (!$file->isFile() || '.php' !== substr($file->getFilename(), -4)) {
continue;
} | [Routing] made AnnotationDirectoryLoader deterministic (closes #<I>) | symfony_symfony | train | php |
a97ed1f397d9e01e6652730da3e0383dd52efbdf | diff --git a/test/test_builder.py b/test/test_builder.py
index <HASH>..<HASH> 100644
--- a/test/test_builder.py
+++ b/test/test_builder.py
@@ -31,7 +31,7 @@ class TestMethods(base.SchemaBuilderTestCase):
def test_to_json(self):
self.assertEqual(
self.builder.to_json(),
- '{"$schema": "%builder"}' % SchemaBuilder.DEFAULT_URI)
+ '{"$schema": "%s"}' % SchemaBuilder.DEFAULT_URI)
def test_add_schema_with_uri_default(self):
test_uri = 'TEST_URI' | oops, I renamed one thing I shouldn't have | wolverdude_GenSON | train | py |
725ce362307623caf97aac648b17c57e429537c9 | diff --git a/pymouse/x11.py b/pymouse/x11.py
index <HASH>..<HASH> 100644
--- a/pymouse/x11.py
+++ b/pymouse/x11.py
@@ -133,6 +133,7 @@ class PyMouseEvent(PyMouseEventMeta):
self.stop()
def stop(self):
+ self.state = False
self.display.flush()
self.display.record_disable_context(self.ctx)
self.display.ungrab_pointer(X.CurrentTime) | X<I> now properly toggles the state off in PyKeyboardEvent with stop() | SavinaRoja_PyUserInput | train | py |
447f2dc9e80a2dedbeadcb54de150933037bf8b1 | diff --git a/sample.go b/sample.go
index <HASH>..<HASH> 100644
--- a/sample.go
+++ b/sample.go
@@ -1,5 +1,17 @@
package zipkintracer
+import (
+ "github.com/openzipkin/zipkin-go"
+)
+
// Sampler functions return if a Zipkin span should be sampled, based on its
// traceID.
type Sampler func(id uint64) bool
+
+var (
+ NeverSample = zipkin.NeverSample
+ AlwaysSample = zipkin.AlwaysSample
+ NewModuloSampler = zipkin.NewModuloSampler
+ NewBoundarySampler = zipkin.NewBoundarySampler
+ NewCountingSampler = zipkin.NewCountingSampler
+) | feat: adds support for samplers. | openzipkin-contrib_zipkin-go-opentracing | train | go |
9554cb690d18a1eb2e0e805bb81012d3e8ab4915 | diff --git a/_config.php b/_config.php
index <HASH>..<HASH> 100644
--- a/_config.php
+++ b/_config.php
@@ -15,4 +15,5 @@ if(!class_exists("GridField")) {
Object::add_extension("FormField", "BootstrapFormField");
Object::add_extension("TextField", "BootstrapTextField");
Object::add_extension("OptionsetField", "BootstrapOptionsetField");
-Object::add_extension("FormAction","BootstrapFormAction");
\ No newline at end of file
+Object::add_extension("FormAction","BootstrapFormAction");
+Object::add_extension("TextareaField", "BootstrapTextField"); | Added decorator to TextareaField | unclecheese_silverstripe-bootstrap-forms | train | php |
39f77c3d733f43bc23cd8a9fccf8fdf04b5b4c95 | diff --git a/src/NGrams/Statistic.php b/src/NGrams/Statistic.php
index <HASH>..<HASH> 100644
--- a/src/NGrams/Statistic.php
+++ b/src/NGrams/Statistic.php
@@ -127,6 +127,21 @@ class Statistic
}
/**
+ * Calculate the T-score
+ * @param array $ngram Array of ngrams with frequencies
+ * @return float Return the calculated value
+ */
+ public function tscore(array $ngram) : float
+ {
+ $var = $this->setStatVariables($ngram);
+
+ $term1 = $var['jointFrequency'] - (($var['leftFrequency'] * $var['rightFrequency'])/$this->totalBigrams);
+ $term2 = sqrt(($var['jointFrequency']));
+
+ return ( $term1 / $term2 );
+ }
+
+ /**
* Calculate the Pointwise mutual information
* @param int $n
* @param int $m | Added T-score measure
Added T-score measure. | yooper_php-text-analysis | train | php |
a5fb15951ba304b103adb7d8c94d7dafaa3c4fe3 | diff --git a/lib/right_chimp/objects/ChimpObjects.rb b/lib/right_chimp/objects/ChimpObjects.rb
index <HASH>..<HASH> 100644
--- a/lib/right_chimp/objects/ChimpObjects.rb
+++ b/lib/right_chimp/objects/ChimpObjects.rb
@@ -51,7 +51,7 @@ module Chimp
@client = RightApi::Client.new(:email => creds[:user], :password => creds[:pass],
:account_id => creds[:account], :api_url => creds[:api_url],
- :timeout => nil)
+ :timeout => nil, :enable_retry => true )
rescue
puts "##############################################################################"
puts "Error: " | OPS-<I> Now right_api_client has retry enabled | rightscale_right_chimp | train | rb |
ec067a714dbce0ac45d3dc7f46c47db73a709dc6 | diff --git a/src/scout_apm/commands.py b/src/scout_apm/commands.py
index <HASH>..<HASH> 100644
--- a/src/scout_apm/commands.py
+++ b/src/scout_apm/commands.py
@@ -128,7 +128,7 @@ class BatchCommand:
def message(self):
messages = list(map(lambda cmd: cmd.message(), self.commands))
- return {'BatchCommand': messages}
+ return {'BatchCommand': {'commands': messages}}
@classmethod
def from_tracked_request(cls, request): | Fix structure mismatch in BatchCommand | scoutapp_scout_apm_python | train | py |
255fe041f07d3b5caf421d89522a2b58234563fe | diff --git a/src/BankBillet/Controller.php b/src/BankBillet/Controller.php
index <HASH>..<HASH> 100644
--- a/src/BankBillet/Controller.php
+++ b/src/BankBillet/Controller.php
@@ -43,7 +43,8 @@ class Controller
$bank = $title->assignment->bank;
- $view_class = __NAMESPACE__ . '\\Views\\' . $bank->view;
+ $view_class = __NAMESPACE__ . '\\Views\\'
+ . BankInterchange\Utils::toPascalCase($bank->name);
$this->view = new $view_class($title, $data, $logos);
} | Update BankBillet Controller
Use bank name in PascalCase to select the billet view | aryelgois_bank-interchange | train | php |
fd6c03a48c61f697edb5662fdf0aae33cbc43b57 | diff --git a/host/daq/readout_utils.py b/host/daq/readout_utils.py
index <HASH>..<HASH> 100644
--- a/host/daq/readout_utils.py
+++ b/host/daq/readout_utils.py
@@ -34,6 +34,9 @@ def interpret_pixel_data(data, dc, pixel_array, invert=True):
address_split = np.array_split(address, np.where(np.diff(address.astype(np.int32)) < 0)[0] + 1)
value_split = np.array_split(value, np.where(np.diff(address.astype(np.int32)) < 0)[0] + 1)
+ if len(address_split) > 5:
+ raise NotImplementedError('Only the data from one double column can be interpreted at once!')
+
mask = np.empty_like(pixel_array.data) # BUG in numpy: pixel_array is de-masked if not .data is used
mask[:] = len(address_split) | ENH: sanity check added | SiLab-Bonn_pyBAR | train | py |
69bdd06d101581255ace86df8a23b3296d9daacd | diff --git a/snekchek/format.py b/snekchek/format.py
index <HASH>..<HASH> 100644
--- a/snekchek/format.py
+++ b/snekchek/format.py
@@ -10,5 +10,10 @@ def vulture_format(data):
def pylint_format(data):
+ last_path = ""
for row in data:
+ if row['path'] != last_path:
+ print(f"File: {row['path']}")
+ last_path = row['path']
+
print(f"{row['message-id'][0]}:{row['line']:>3}, {row['column']:>2}: {row['message']} ({row['symbol']})") | Add file to pylint output | IzunaDevs_SnekChek | train | py |
7187827fa195f4423bf280a62229147c7787615b | diff --git a/examples/example_01_connectivity.py b/examples/example_01_connectivity.py
index <HASH>..<HASH> 100644
--- a/examples/example_01_connectivity.py
+++ b/examples/example_01_connectivity.py
@@ -49,7 +49,7 @@ We simply choose a VAR model order of 30, and reduction to 4 components (that's
api = scot.SCoT(30, reducedim=4, locations=locs)
"""
-Perform MVARICA
+Perform MVARICA and plot the components
"""
api.setData(data)
@@ -60,11 +60,8 @@ api.plotComponents()
"""
Connectivity Analysis
-We will extract the full frequency directed transfer function (ffDTF) from the
+Extract the full frequency directed transfer function (ffDTF) from the
activations of each class and plot them with matplotlib.
-
-We define a function "topo" that multiplot2 calls to draws scalp projections
-of the components.
"""
api.setData(data, classes) | Improled comments a bit
Former-commit-id: <I>b<I>fe2f6a<I>de<I>fac<I>ca2c<I>f9ff<I> | scot-dev_scot | train | py |
30366bcbb37d7e26f80055a6202d5b527abf2a85 | diff --git a/autocompletefile.go b/autocompletefile.go
index <HASH>..<HASH> 100644
--- a/autocompletefile.go
+++ b/autocompletefile.go
@@ -81,24 +81,12 @@ func (self *AutoCompleteFile) processDecl(decl ast.Decl) {
return
}
- methodof := MethodOf(decl)
- if methodof != "" {
- decl, ok := self.decls[methodof]
- if ok {
- decl.AddChild(d)
- } else {
- decl = NewDecl(methodof, DECL_METHODS_STUB, self.scope)
- self.decls[methodof] = decl
- decl.AddChild(d)
- }
- } else {
- // the declaration itself has a scope which follows it's definition
- // and it's false for type declarations
- if d.Class != DECL_TYPE {
- self.scope = NewScope(self.scope)
- }
- self.scope.addNamedDecl(d)
+ // the declaration itself has a scope which follows it's definition
+ // and it's false for type declarations
+ if d.Class != DECL_TYPE {
+ self.scope = NewScope(self.scope)
}
+ self.scope.addNamedDecl(d)
})
} | Remove method variant from local declarations processing.
Because it's invalid anyway. | nsf_gocode | train | go |
a01f71eb243370bb44337bd0518400b2515b9167 | diff --git a/lib/model/person.js b/lib/model/person.js
index <HASH>..<HASH> 100644
--- a/lib/model/person.js
+++ b/lib/model/person.js
@@ -36,7 +36,8 @@ Person.schema = ActivityObject.subSchema(["attachments",
["followers",
"following",
"favorites",
- "lists"]);
+ "lists"],
+ ["image.url"]);
Person.pkey = function() {
return "id"; | Person has an index on image.url | pump-io_pump.io | train | js |
fc57d6ea78fbcaf2b2484e1ddb03d648c6881cde | diff --git a/spec/swag_dev/project/tools_provider_spec.rb b/spec/swag_dev/project/tools_provider_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/swag_dev/project/tools_provider_spec.rb
+++ b/spec/swag_dev/project/tools_provider_spec.rb
@@ -10,6 +10,7 @@ describe SwagDev::Project::ToolsProvider, :tools_provider do
it { expect(subject).to respond_to(:get).with(1).arguments }
it { expect(subject).to respond_to(:fetch).with(1).arguments }
it { expect(subject).to respond_to('[]').with(1).arguments }
+ it { expect(subject).to respond_to('[]=').with(2).arguments }
it { expect(subject).to respond_to('member?').with(1).arguments }
it { expect(subject).to respond_to('merge!').with(1).arguments }
end | tools_provider (spec) example added | SwagDevOps_kamaze-project | train | rb |
687f299c4f396c87c0365d48ab628aebbf0fabaa | diff --git a/application/Config/Database.php b/application/Config/Database.php
index <HASH>..<HASH> 100644
--- a/application/Config/Database.php
+++ b/application/Config/Database.php
@@ -1,5 +1,6 @@
<?php namespace Config;
+use CodeIgniter\CLI\CLI;
use phpDocumentor\Reflection\DocBlock\Tag\VarTag;
/**
@@ -107,6 +108,9 @@ class Database extends \CodeIgniter\Database\Config
}
}
}
+
+ CLI::write('ENV='.ENVIRONMENT);
+ CLI::write('GROUP='.$group);
}
//-------------------------------------------------------------------- | Add some logging just to determine what Travis is doing | codeigniter4_CodeIgniter4 | train | php |
f4e3d2e0f278cb6f539d54d61c2612528718ba5b | diff --git a/lib/sbsm/request.rb b/lib/sbsm/request.rb
index <HASH>..<HASH> 100644
--- a/lib/sbsm/request.rb
+++ b/lib/sbsm/request.rb
@@ -80,7 +80,7 @@ module SBSM
'session_path' => '/',
}
if(is_crawler?)
- sid = [ENV['DEFAULT_FLAVOR'], @cgi.user_agent].join('-')
+ sid = [ENV['DEFAULT_FLAVOR'], @cgi.params['language'], @cgi.user_agent].join('-')
args.store('session_id', sid)
end
@session = CGI::Session.new(@cgi, args) | Keep crawler-sessions for different languages separate. | zdavatz_sbsm | train | rb |
9dc7ab801d12fdb33e21e369e0e7774e606e700d | diff --git a/lib/config.js b/lib/config.js
index <HASH>..<HASH> 100644
--- a/lib/config.js
+++ b/lib/config.js
@@ -132,9 +132,11 @@ config.reload = function (){
}
config.debug_configuration_file_reload = function (){
+ config.status = "loading";
var debug_config = path.join(__dirname, this.debug_service.path);
this.lookup = JSON.parse(fs.readFileSync(debug_config)).functionList;
this.refreshLookupSet();
+ config.status = "done";
}
config.debug_configuration_service_reload = function () { | Fix interval reloading from file source | HPSoftware_node-offline-debug | train | js |
bb83b47defcc984ba9b4fb8e84771f36960945fb | diff --git a/src/js/mep-player.js b/src/js/mep-player.js
index <HASH>..<HASH> 100644
--- a/src/js/mep-player.js
+++ b/src/js/mep-player.js
@@ -287,7 +287,7 @@
t.container =
$('<span class="mejs-offscreen">' + videoPlayerTitle + '</span>'+
'<div id="' + t.id + '" class="mejs-container ' + (mejs.MediaFeatures.svg ? 'svg' : 'no-svg') +
- '" tabindex="0" role="application" aria-label=' + videoPlayerTitle + '">'+
+ '" tabindex="0" role="application" aria-label="' + videoPlayerTitle + '">'+
'<div class="mejs-inner">'+
'<div class="mejs-mediaelement"></div>'+
'<div class="mejs-layers"></div>'+ | Pull #<I>, Fixing a missing quote mark - Accessibility slider control | mediaelement_mediaelement | train | js |
77d1eaeb2c4134fa9544bf70d154713993074c1a | diff --git a/kafka_utils/kafka_consumer_manager/commands/offsets_for_timestamp.py b/kafka_utils/kafka_consumer_manager/commands/offsets_for_timestamp.py
index <HASH>..<HASH> 100644
--- a/kafka_utils/kafka_consumer_manager/commands/offsets_for_timestamp.py
+++ b/kafka_utils/kafka_consumer_manager/commands/offsets_for_timestamp.py
@@ -83,6 +83,19 @@ class OffsetsForTimestamp(OffsetManagerBase):
@classmethod
def print_offsets(cls, partition_to_offset, orig_timestamp):
+ milliseconds_thresold = 999999999999
+ if orig_timestamp < milliseconds_thresold:
+ date = datetime.fromtimestamp(
+ orig_timestamp / 1000.0,
+ tz=pytz.timezone("US/Pacific"),
+ ).strftime("%Y-%m-%d %H:%M:%S %Z")
+ print(
+ "WARNING: Supplied timestamp {timestamp} corresponds to {datetime}, "
+ "remember that timestamp parameter needs to be in milliseconds.".format(
+ timestamp=orig_timestamp,
+ datetime=date
+ )
+ )
topics = {}
for tp, offset_timestamp in six.iteritems(partition_to_offset):
if tp.topic not in topics: | Add warning if timestamp argument in offsets_for_timestamp is in seconds | Yelp_kafka-utils | train | py |
d7e29274374916589d714451e40577f0dd188e24 | diff --git a/src/JMS/Serializer/Twig/SerializerExtension.php b/src/JMS/Serializer/Twig/SerializerExtension.php
index <HASH>..<HASH> 100644
--- a/src/JMS/Serializer/Twig/SerializerExtension.php
+++ b/src/JMS/Serializer/Twig/SerializerExtension.php
@@ -50,7 +50,7 @@ class SerializerExtension extends \Twig_Extension
public function getFunctions()
{
return array(
- new \Twig_SimpleFunction('serialization_context', '\JMS\Serializer\SerializationContext::createContext'),
+ new \Twig_SimpleFunction('serialization_context', '\JMS\Serializer\SerializationContext::create'),
);
} | Fix the method name for the serialization context factory | alekitto_serializer | train | php |
3d5de14f0d249ecf5714b433245e03ec4c18f7d7 | diff --git a/tests.py b/tests.py
index <HASH>..<HASH> 100644
--- a/tests.py
+++ b/tests.py
@@ -18,11 +18,9 @@ Options:
# erik@a8.nl (04-03-15)
# license: GNU-GPL2
-import os
-import unittest
+from unittester import run_unit_test
from arguments import Arguments
-from pyprofiler import start_profile, end_profile
-from consoleprinter import console
+
def raises_error(*args, **kwds):
"""
@@ -46,7 +44,6 @@ class ArgumentTest(unittest.TestCase):
"""
self.arguments = Arguments(__doc__)
-
def test_assert_raises(self):
"""
test_assert_raises | appinstance
Thursday <I> March <I> (week:9 day:<I>), <I>:<I>:<I> | erikdejonge_arguments | train | py |
ad352e36517514a350bb9e1288be5a6ea25cfb6b | diff --git a/test/lib/migrator_test.rb b/test/lib/migrator_test.rb
index <HASH>..<HASH> 100644
--- a/test/lib/migrator_test.rb
+++ b/test/lib/migrator_test.rb
@@ -21,7 +21,7 @@ class MigratorTest < ActiveSupport::TestCase
sub_test_case '.run_migrations' do
setup do
- FileUtils.touch Rails.root.join('db/migrate/20999999999999_create_foobars.rb')
+ File.write Rails.root.join('db/migrate/20999999999999_create_foobars.rb'), 'class CreateFoobars < ActiveRecord::VERSION::MAJOR >= 5 ? ActiveRecord::Migration[5.0] : ActiveRecord::Migration; end'
mock(ActiveRecord::Migrator).run(:up, ['db/migrate'], 20999999999999)
mock(ActiveRecord::SchemaDumper).dump(ActiveRecord::Base.connection, anything)
end | AR <I> requires each migration file to define a real migration class | amatsuda_erd | train | rb |
82283f99ac118d5ed6808d3623f78a98308945c8 | diff --git a/runtime/classes/propel/util/BasePeer.php b/runtime/classes/propel/util/BasePeer.php
index <HASH>..<HASH> 100644
--- a/runtime/classes/propel/util/BasePeer.php
+++ b/runtime/classes/propel/util/BasePeer.php
@@ -557,7 +557,7 @@ class BasePeer
$stmt->bindValue(':p'.$i++, null, PDO::PARAM_NULL);
- } else {
+ } elseif (isset($tableMap) ) {
$cMap = $dbMap->getTable($tableName)->getColumn($columnName);
$type = $cMap->getType();
@@ -591,6 +591,8 @@ class BasePeer
}
$stmt->bindValue(':p'.$i++, $value, $pdoType);
+ } else {
+ $stmt->bindValue(':p'.$i++, $value);
}
} // foreach
} | Added support for populating with params without table names, allows adding for example a count(somecol) as result to the selectcolumns of a criteria and then using addHaving(->getNewCriterion(found, 3)) | propelorm_Propel | train | php |
9468810e7e303536fb9c3f414573e545350bd657 | diff --git a/src/Illuminate/Console/Scheduling/Event.php b/src/Illuminate/Console/Scheduling/Event.php
index <HASH>..<HASH> 100644
--- a/src/Illuminate/Console/Scheduling/Event.php
+++ b/src/Illuminate/Console/Scheduling/Event.php
@@ -601,10 +601,10 @@ class Event {
{
throw new LogicException("Must direct output to a file in order to e-mail results.");
}
-
+ $addresses = is_array($addresses) ? $addresses : func_get_args();
return $this->then(function(Mailer $mailer) use ($addresses)
{
- $this->emailOutput($mailer, is_array($addresses) ? $addresses : func_get_args());
+ $this->emailOutput($mailer, $addresses);
});
} | Update Event.php
func_get_args() wrapped in an anonymous function will not receive args from method. | laravel_framework | train | php |
f4488c420f7e51eb22af6af1430558fb00791069 | diff --git a/titan-core/src/main/java/com/thinkaurelius/titan/graphdb/configuration/GraphDatabaseConfiguration.java b/titan-core/src/main/java/com/thinkaurelius/titan/graphdb/configuration/GraphDatabaseConfiguration.java
index <HASH>..<HASH> 100644
--- a/titan-core/src/main/java/com/thinkaurelius/titan/graphdb/configuration/GraphDatabaseConfiguration.java
+++ b/titan-core/src/main/java/com/thinkaurelius/titan/graphdb/configuration/GraphDatabaseConfiguration.java
@@ -415,7 +415,7 @@ public class GraphDatabaseConfiguration {
* Ganglia data. Setting this config key has no effect unless
* {@link #GANGLIA_INTERVAL} is also set.
*/
- public static final String GANGLIA_HOST_OR_GROUP = "host";
+ public static final String GANGLIA_HOST_OR_GROUP = "hostname";
/**
* The number of milliseconds to wait between sending Metrics data to the | Rename metrics.ganglia.host to .hostname
Matches storage.hostname. There's no technical reason they have to
match, I'm just trying to stick to precedent. | thinkaurelius_titan | train | java |
d281348a51f566fa6e2471296e24d465146ad492 | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -61,6 +61,7 @@ setup(
"pytz>=2017.2",
"six>=1.10.0",
"sqlalchemy>=1.3.0,<2",
+ "cached-property>=1.5.1,<2",
],
extras_require={':python_version<"3.7"': ["dataclasses>=0.6"],},
classifiers=[ | Sync setup.py with Pipfile (#<I>)
* Sync setup.py with Pipfile
* Address comment | kensho-technologies_graphql-compiler | train | py |
1a3222be5902f62b4342976b53c851012f15c519 | diff --git a/src/livestreamer/plugins/ustreamtv.py b/src/livestreamer/plugins/ustreamtv.py
index <HASH>..<HASH> 100644
--- a/src/livestreamer/plugins/ustreamtv.py
+++ b/src/livestreamer/plugins/ustreamtv.py
@@ -360,9 +360,16 @@ class UStreamTV(Plugin):
for stream_index, stream_info in enumerate(provider_streams):
stream = None
stream_height = int(stream_info.get("height", 0))
- stream_name = (stream_info.get("description") or
- (stream_height > 0 and "{0}p".format(stream_height)) or
- "live")
+ stream_name = stream_info.get("description")
+
+ if not stream_name:
+ if stream_height:
+ if not stream_info.get("isTranscoded"):
+ stream_name = "{0}p+".format(stream_height)
+ else:
+ stream_name = "{0}p".format(stream_height)
+ else:
+ stream_name = "live"
if stream_name in streams:
provider_name_clean = provider_name.replace("uhs_", "") | plugins.ustreamtv: Fix missing transcode streams.
If a transcode is the same resolution as the source stream
it does not get added to the list of streams. | streamlink_streamlink | train | py |
780eef5e5551fc9bef905624304151bd9bd27f32 | diff --git a/lib/sfrest/task.rb b/lib/sfrest/task.rb
index <HASH>..<HASH> 100644
--- a/lib/sfrest/task.rb
+++ b/lib/sfrest/task.rb
@@ -161,15 +161,13 @@ module SFRest
end
# Pauses a specific task identified by its task id.
- # CURRENTLY NOT FUNCTIONING, ISSUES WITH REST TASK-PAUSING FUNCTIONALITY.
def pause_task(task_id, level = 'family')
current_path = '/api/v1/pause/' << task_id.to_s
payload = { 'paused' => true, 'level' => level }.to_json
@conn.post(current_path, payload)
end
- # Pauses a specific task identified by its task id.
- # CURRENTLY NOT FUNCTIONING, ISSUES WITH REST TASK-PAUSING FUNCTIONALITY.
+ # Resumes a specific task identified by its task id.
def resume_task(task_id, level = 'family')
current_path = '/api/v1/pause/' << task_id.to_s
payload = { 'paused' => false, 'level' => level }.to_json | DG-<I> clean out cruft since pause and resume for a specific task works now. | acquia_sf-sdk-ruby | train | rb |
28ac6d69dc279b81ec128243645e0817d04b664f | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -24,14 +24,14 @@ setup(
'Topic :: Text Processing :: Linguistic',
'Topic :: Utilities',
],
- description=('NLP support for Classical languages.'),
+ description='NLP support for Classical languages.',
install_requires=['astroid',
'nltk',
'gnureadline',
'readline',
'requests',
'requests-toolbelt',
- 'numpy'],
+ 'numpy', 'cltk'],
keywords=['nlp', 'nltk', 'greek', 'latin'],
license='MIT',
long_description="The Classical Language Toolkit (CLTK) is a framework for natural language processing for Classical languages.", # pylint: disable=C0301 | add cltk to reqs, cleanup | cltk_cltk | train | py |
9505d1466f82ee764d3049c87486cff490aeb738 | diff --git a/src/main/org/openscience/cdk/graph/ConnectivityChecker.java b/src/main/org/openscience/cdk/graph/ConnectivityChecker.java
index <HASH>..<HASH> 100644
--- a/src/main/org/openscience/cdk/graph/ConnectivityChecker.java
+++ b/src/main/org/openscience/cdk/graph/ConnectivityChecker.java
@@ -62,6 +62,10 @@ public class ConnectivityChecker
@TestMethod("testIsConnected_IAtomContainer,testPartitionIntoMolecules_IsConnected_Consistency")
public static boolean isConnected(IAtomContainer atomContainer)
{
+ // with one atom or less, we define it to be connected, as there is no
+ // partitioning needed
+ if (atomContainer.getAtomCount() < 2) return true;
+
IAtomContainer newContainer = atomContainer.getBuilder().newAtomContainer();
IMolecule molecule = atomContainer.getBuilder().newMolecule();
List<IAtom> sphere = new ArrayList<IAtom>(); | With one atom or less, we define it to be connected, as there is no partitioning needed (fixes #<I>, NullPointerException on IAtomContainer with no atoms) | cdk_cdk | train | java |
d727a56138e8bbaef82b4ae12d677a3e2b107a99 | diff --git a/lib/jwt.rb b/lib/jwt.rb
index <HASH>..<HASH> 100644
--- a/lib/jwt.rb
+++ b/lib/jwt.rb
@@ -220,14 +220,14 @@ module JWT
end
def raw_to_asn1(signature, private_key)
- byte_size = (private_key.group.degree / 8.0).ceil
+ byte_size = (private_key.group.degree + 7) / 8
r = signature[0..(byte_size - 1)]
s = signature[byte_size..-1]
OpenSSL::ASN1::Sequence.new([r, s].map { |int| OpenSSL::ASN1::Integer.new(OpenSSL::BN.new(int, 2)) }).to_der
end
def asn1_to_raw(signature, public_key)
- byte_size = (public_key.group.degree / 8.0).ceil
+ byte_size = (public_key.group.degree + 7) / 8
OpenSSL::ASN1.decode(signature).value.map { |value| value.value.to_s(2).rjust(byte_size, "\x00") }.join
end
end | Use integer division in ECDSA signature conversion method | jwt_ruby-jwt | train | rb |
cab183dc1e2efc057392f06122d616d321c0327a | diff --git a/bokeh/properties.py b/bokeh/properties.py
index <HASH>..<HASH> 100644
--- a/bokeh/properties.py
+++ b/bokeh/properties.py
@@ -168,7 +168,8 @@ class PropertyDescriptor(PropertyGenerator):
return default()
def _raw_default(self):
- """The raw_default() needs to be validated and transformed by prepare_value() before use. Prefer prepared_default()."""
+ """The raw_default() needs to be validated and transformed by prepare_value()
+ before use. Prefer prepared_default()."""
return self._copy_default(self._default)
def prepared_default(self, cls, name): | Wrap long line in properties.py | bokeh_bokeh | train | py |
7f4cd0f654fdc5b66ffd1e3b1de8edc6e2968713 | diff --git a/chef/lib/chef/index_queue/amqp_client.rb b/chef/lib/chef/index_queue/amqp_client.rb
index <HASH>..<HASH> 100644
--- a/chef/lib/chef/index_queue/amqp_client.rb
+++ b/chef/lib/chef/index_queue/amqp_client.rb
@@ -68,6 +68,7 @@ class Chef
end
def disconnected!
+ Chef::Log.error("Disconnected from the AMQP Broker (RabbitMQ)")
@amqp_client = nil
reset!
end
@@ -76,12 +77,15 @@ class Chef
retries = 0
begin
exchange.publish({"action" => action.to_s, "payload" => data}.to_json)
- rescue Bunny::ServerDownError, Bunny::ConnectionError, Errno::ECONNRESET => e
- Chef::Log.error("Disconnected from the AMQP Broker, cannot queue data to the indexer")
+ rescue Bunny::ServerDownError, Bunny::ConnectionError, Errno::ECONNRESET
disconnected!
- retryies += 1
- retry unless retryies > 1
- raise e
+ if (retries += 1) < 2
+ Chef::Log.info("Attempting to reconnect to the AMQP broker")
+ retry
+ else
+ Chef::Log.fatal("Could not re-connect to the AMQP broker, giving up")
+ raise
+ end
end
end | [CHEF-<I>] fix typos in amqp retry patch | chef_chef | train | rb |
dcc1488ce92586262a3abe15477ea444a8972ce4 | diff --git a/peyotl/amendments/validation/adaptor.py b/peyotl/amendments/validation/adaptor.py
index <HASH>..<HASH> 100644
--- a/peyotl/amendments/validation/adaptor.py
+++ b/peyotl/amendments/validation/adaptor.py
@@ -80,7 +80,7 @@ class AmendmentValidationAdaptor(object):
if isinstance(self._curator, dict):
for k in self._curator.keys():
try:
- assert k in ['login', 'name']
+ assert k in ['login', 'name', 'email',]
except:
errors.append("Unexpected key '{k}' found in curator".format(k=k))
if 'login' in self._curator:
@@ -93,6 +93,12 @@ class AmendmentValidationAdaptor(object):
assert isinstance(self._curator.get('login'), string_types)
except:
errors.append("Curator 'login' should be a string")
+ if 'email' in self._curator:
+ try:
+ assert isinstance(self._curator.get('email'), string_types)
+ except:
+ # TODO: Attempt to validate as an email address?
+ errors.append("Curator 'email' should be a string (a valid email address)")
# test for a valid date_created (should be valid ISO 8601)
self._date_created = obj.get('date_created') | Update validation to accept 'curator.email' field | OpenTreeOfLife_peyotl | train | py |
0d9390866f9ce42870d3116094cd49e0019a970a | diff --git a/git/cmd.py b/git/cmd.py
index <HASH>..<HASH> 100644
--- a/git/cmd.py
+++ b/git/cmd.py
@@ -609,6 +609,12 @@ class Git(LazyMixin):
# end handle
try:
+ if sys.platform == 'win32':
+ CREATE_NO_WINDOW = 0x08000000
+ creationflags = CREATE_NO_WINDOW
+ else:
+ creationflags = None
+
proc = Popen(command,
env=env,
cwd=cwd,
@@ -619,6 +625,7 @@ class Git(LazyMixin):
shell=self.USE_SHELL,
close_fds=(os.name == 'posix'), # unsupported on windows
universal_newlines=universal_newlines,
+ creationflags=creationflags,
**subprocess_kwargs
)
except cmd_not_found_exception as err:
@@ -629,7 +636,13 @@ class Git(LazyMixin):
def _kill_process(pid):
""" Callback method to kill a process. """
- p = Popen(['ps', '--ppid', str(pid)], stdout=PIPE)
+ if sys.platform == 'win32':
+ CREATE_NO_WINDOW = 0x08000000
+ creationflags = CREATE_NO_WINDOW
+ else:
+ creationflags = None
+
+ p = Popen(['ps', '--ppid', str(pid)], stdout=PIPE, creationflags)
child_pids = []
for line in p.stdout:
if len(line.split()) > 0: | Prevent CMD windows being shown when starting git in a subprocess.
This fixes a UI problem with using GitPython from a GUI python probgram.
Each repo that is opened creates a git cat-file processs and that provess will create
a console window with out this change. | gitpython-developers_GitPython | train | py |
d0ec194d1d24fe940846e91b33fa91893d9c9ffe | diff --git a/tasks.py b/tasks.py
index <HASH>..<HASH> 100644
--- a/tasks.py
+++ b/tasks.py
@@ -36,6 +36,10 @@ def test(ctx):
@task
def publish_coverage(ctx):
if Utils.get_branch() == "master":
+ print("Downloading AWS CLI")
+ for line in cli.pull('garland/aws-cli-docker:latest', stream=True):
+ pass
+
Docker.run(
cli,
tag="garland/aws-cli-docker:latest", | downloading aws cli image first | VJftw_invoke-tools | train | py |
593489d45426615cd784761895f6c14ae6e8cdff | diff --git a/clientv3/main_test.go b/clientv3/main_test.go
index <HASH>..<HASH> 100644
--- a/clientv3/main_test.go
+++ b/clientv3/main_test.go
@@ -15,10 +15,12 @@
package clientv3_test
import (
+ "fmt"
"os"
"regexp"
"strings"
"testing"
+ "time"
"github.com/coreos/etcd/auth"
"github.com/coreos/etcd/integration"
@@ -50,6 +52,10 @@ func TestMain(m *testing.M) {
}
v = m.Run()
clus.Terminate(nil)
+ if err := testutil.CheckAfterTest(time.Second); err != nil {
+ fmt.Fprintf(os.Stderr, "%v", err)
+ os.Exit(1)
+ }
} else {
v = m.Run()
} | clientv3: use CheckAfterTest after terminating cluster
AfterTest() has a delay that waits for runtime goroutines to exit;
CheckLeakedGoroutine does not. Since the test runner manages the
test cluster for examples, there is no delay between terminating
the cluster and checking for leaked goroutines. Instead, apply
Aftertest checking before running CheckLeakedGoroutine to let runtime
http goroutines finish. | etcd-io_etcd | train | go |
fd30c7566b2a8afb1ad26fbf56e04d97aaf3b6da | diff --git a/xchange-bitbay/src/main/java/com/xeiam/xchange/bitbay/BitbayAdapters.java b/xchange-bitbay/src/main/java/com/xeiam/xchange/bitbay/BitbayAdapters.java
index <HASH>..<HASH> 100644
--- a/xchange-bitbay/src/main/java/com/xeiam/xchange/bitbay/BitbayAdapters.java
+++ b/xchange-bitbay/src/main/java/com/xeiam/xchange/bitbay/BitbayAdapters.java
@@ -88,7 +88,7 @@ public class BitbayAdapters {
for (BitbayTrade bitbayTrade : bitbayTrades) {
- Trade trade = new Trade(null, bitbayTrade.getAmount(), currencyPair, bitbayTrade.getPrice(), new Date(bitbayTrade.getDate()),
+ Trade trade = new Trade(null, bitbayTrade.getAmount(), currencyPair, bitbayTrade.getPrice(), new Date(bitbayTrade.getDate()*1000),
bitbayTrade.getTid());
tradeList.add(trade); | Fix bitbay trade timestamp. Bitbay use unix timestamp so we need to multiply it to <I>. | knowm_XChange | train | java |
e88ea4a4ac39525b47636b2f86f6ab22dc935a3e | diff --git a/app/controllers/providers_controller.rb b/app/controllers/providers_controller.rb
index <HASH>..<HASH> 100644
--- a/app/controllers/providers_controller.rb
+++ b/app/controllers/providers_controller.rb
@@ -73,7 +73,10 @@ class ProvidersController < ApplicationController
display_message = parse_display_message(error.response)
error_text = _("Subscription manifest upload for provider '%{name}' failed." % {:name => @provider.name})
error_text += _("%{newline}Reason: %{reason}" % {:reason => display_message, :newline => "<br />"}) unless display_message.blank?
- error_text += _("%{newline}If you are uploading an older manifest, you can use the Force checkbox to overwrite existing data." % { :newline => "<br />"})
+ # In some cases, force_update will allow the manifest to be uploaded when it normally would not
+ if force_update == "false"
+ error_text += _("%{newline}If you are uploading an older manifest, you can use the Force checkbox to overwrite existing data." % { :newline => "<br />"})
+ end
notice error_text, {:level => :error}
Rails.logger.error "error uploading subscriptions."
Rails.logger.error error | <I> - show error message suggesting use of force upload not shown when force upload is already set | Katello_katello | train | rb |
f837a13b8fea45ffbc2252f7296ed5c7f94ffd91 | diff --git a/addons/docs/src/frameworks/ember/jsondoc.js b/addons/docs/src/frameworks/ember/jsondoc.js
index <HASH>..<HASH> 100644
--- a/addons/docs/src/frameworks/ember/jsondoc.js
+++ b/addons/docs/src/frameworks/ember/jsondoc.js
@@ -10,7 +10,14 @@ export const getJSONDoc = () => {
export const extractArgTypes = (componentName) => {
const json = getJSONDoc();
+ if (!(json && json.included)) {
+ return {};
+ }
const componentDoc = json.included.find((doc) => doc.attributes.name === componentName);
+
+ if (!componentDoc) {
+ return '';
+ }
const rows = componentDoc.attributes.arguments.map((prop) => {
return {
name: prop.name,
@@ -29,6 +36,9 @@ export const extractArgTypes = (componentName) => {
export const extractComponentDescription = (componentName) => {
const json = getJSONDoc();
+ if (!(json && json.included)) {
+ return {};
+ }
const componentDoc = json.included.find((doc) => doc.attributes.name === componentName);
if (!componentDoc) { | return early when there's no JSDoc for a component | storybooks_storybook | train | js |
ffe5be079effb1dd8ca347e752e6c300492c54e5 | diff --git a/lib/multirepo/files/tracking-files.rb b/lib/multirepo/files/tracking-files.rb
index <HASH>..<HASH> 100644
--- a/lib/multirepo/files/tracking-files.rb
+++ b/lib/multirepo/files/tracking-files.rb
@@ -12,7 +12,7 @@ module MultiRepo
def self.stage
FILE_CLASSES.each do |c|
- Git.run_in_current_dir("add -A #{c::FILENAME}", Runner::Verbosity::OUTPUT_ON_ERROR)
+ Git.run_in_current_dir("add --force #{c::FILENAME}", Runner::Verbosity::OUTPUT_ON_ERROR)
end
end | Force-adding tracking files, in case projects' gitignore patterns match important multirepo files. | fortinmike_git-multirepo | train | rb |
93a24840371e8c137b4744898b0e80fd9f988ee0 | diff --git a/mbed/mbed.py b/mbed/mbed.py
index <HASH>..<HASH> 100644
--- a/mbed/mbed.py
+++ b/mbed/mbed.py
@@ -871,7 +871,7 @@ class Repo(object):
if rev is None or len(rev) == 0:
return 'latest' + (' revision in the current branch' if ret_rev else '')
elif re.match(r'^([a-zA-Z0-9]{12,40})$', rev) or re.match(r'^([0-9]+)$', rev):
- return 'rev' + (' #'+rev if ret_rev else '')
+ return 'rev' + (' #'+rev[0:12] if ret_rev else '')
else:
return 'branch' + (' '+rev if ret_rev else '') | Use <I>byte revision hashes in reports for consistency with developer.mbed.org
.lib references use full <I>byte hashes | ARMmbed_mbed-cli | train | py |
84baf89a7a44abe44f733188783db9a306d8e2f7 | diff --git a/commands/application.angular.js b/commands/application.angular.js
index <HASH>..<HASH> 100644
--- a/commands/application.angular.js
+++ b/commands/application.angular.js
@@ -36,7 +36,7 @@ const create = (appName, options) => {
};
const addTemplate = (appName, options, evaluatingOptions) => {
- runSchematicCommand(`add-app-template --project=${appName}`, options, evaluatingOptions);
+ runSchematicCommand(`add-app-template --project=${appName} --overwriteAppComponent`, options, evaluatingOptions);
};
const addView = (viewName, options) => { | Add overwriteAppComand option to command (#<I>) | DevExpress_devextreme-cli | train | js |
f97f413ee2d6b3b85816cd590b054405497d3bd6 | diff --git a/java/client/src/org/openqa/selenium/remote/RemoteWebDriver.java b/java/client/src/org/openqa/selenium/remote/RemoteWebDriver.java
index <HASH>..<HASH> 100644
--- a/java/client/src/org/openqa/selenium/remote/RemoteWebDriver.java
+++ b/java/client/src/org/openqa/selenium/remote/RemoteWebDriver.java
@@ -236,6 +236,10 @@ public class RemoteWebDriver implements WebDriver, JavascriptExecutor,
startSession(desiredCapabilities, null);
}
+ /**
+ * @deprecated Use {@link #startSession(Capabilities)} instead.
+ */
+ @Deprecated
@SuppressWarnings({"unchecked"})
protected void startSession(Capabilities desiredCapabilities,
Capabilities requiredCapabilities) { | No logical change: marking a method deprecated | SeleniumHQ_selenium | train | java |
4071b56ad543a5f71d5f71f9fbfce242157b4458 | diff --git a/contacts.js b/contacts.js
index <HASH>..<HASH> 100644
--- a/contacts.js
+++ b/contacts.js
@@ -1,4 +1,5 @@
var Reduce = require('flumeview-reduce')
+var isFeed = require('ssb-ref').isFeed
//track contact messages, follow, unfollow, block
module.exports = function (sbot, createLayer, config) {
@@ -7,7 +8,7 @@ module.exports = function (sbot, createLayer, config) {
var initial = false
var hops = {}
hops[sbot.id] = 0
- var index = sbot._flumeUse('contacts2', Reduce(3, function (g, data) {
+ var index = sbot._flumeUse('contacts2', Reduce(4, function (g, data) {
if(!initial) {
initial = true
layer(g = g || {})
@@ -20,7 +21,7 @@ module.exports = function (sbot, createLayer, config) {
data.value.content.following === false ? -2 :
data.value.content.blocking || data.value.content.flagged ? -1
: null
- if(from && to && value != null)
+ if(isFeed(from) && isFeed(to) && value != null)
return layer(from, to, value)
return g
})) | check that follow types are really ids, sometimes people manually following forget and use @names | ssbc_ssb-friends | train | js |
e801a1c859b6ccf3475014845ece9a7a228e74b2 | diff --git a/moco-runner/src/main/java/com/github/dreamhead/moco/parser/model/DynamicResponseHandlerFactory.java b/moco-runner/src/main/java/com/github/dreamhead/moco/parser/model/DynamicResponseHandlerFactory.java
index <HASH>..<HASH> 100644
--- a/moco-runner/src/main/java/com/github/dreamhead/moco/parser/model/DynamicResponseHandlerFactory.java
+++ b/moco-runner/src/main/java/com/github/dreamhead/moco/parser/model/DynamicResponseHandlerFactory.java
@@ -144,7 +144,7 @@ public class DynamicResponseHandlerFactory extends Dynamics implements ResponseH
public ResponseHandler apply(final Map.Entry<String, TextContainer> pair) {
String result = COMPOSITES.get(name);
if (result == null) {
- throw new RuntimeException("unknown composite handler name [" + name + "]");
+ throw new IllegalArgumentException("unknown composite handler name [" + name + "]");
}
return createResponseHandler(pair, result); | replaced exception with illegal argument exception in dynamic response handler factory | dreamhead_moco | train | java |
46fff11ac4bbf6465ada675ff92a5c3fa3c9d92b | diff --git a/src/Helper/ExportHelper.php b/src/Helper/ExportHelper.php
index <HASH>..<HASH> 100644
--- a/src/Helper/ExportHelper.php
+++ b/src/Helper/ExportHelper.php
@@ -47,6 +47,7 @@ class ExportHelper extends AbstractExportHelper
{
/**
* @inheritdoc
+ * @suppress PhanAccessMethodInternal
*/
public function encrypt($secret, $data)
{
diff --git a/src/Helper/IframeHelper.php b/src/Helper/IframeHelper.php
index <HASH>..<HASH> 100644
--- a/src/Helper/IframeHelper.php
+++ b/src/Helper/IframeHelper.php
@@ -58,7 +58,7 @@ final class IframeHelper extends AbstractHelper
*
* @param IframeInterface $iframe the iframe meta data.
* @param AccountInterface|null $account the configuration to return the url for.
- * @param UserInterface $user
+ * @param UserInterface|null $user
* @param array $params additional parameters to add to the iframe url.
* @return string the iframe url.
*/ | Fix Annotations and Suppress Phan internal method warning in phpseclib | Nosto_nosto-php-sdk | train | php,php |
aab2eda7dfd9f490c36e2e3a9d16ca12f5cebb44 | diff --git a/clientv3/watch.go b/clientv3/watch.go
index <HASH>..<HASH> 100644
--- a/clientv3/watch.go
+++ b/clientv3/watch.go
@@ -711,7 +711,11 @@ func (w *watchGrpcStream) waitCancelSubstreams(stopc <-chan struct{}) <-chan str
ws.closing = true
close(ws.outc)
ws.outc = nil
- go func() { w.closingc <- ws }()
+ w.wg.Add(1)
+ go func() {
+ defer w.wg.Done()
+ w.closingc <- ws
+ }()
case <-stopc:
}
}(w.resuming[i]) | clientv3: register waitCancelSubstreams closingc goroutine with waitgroup
Fixes #<I> | etcd-io_etcd | train | go |
4ec1a5b4394ff02c5e3f11036eb285e7ae577f96 | diff --git a/gulpfile.js b/gulpfile.js
index <HASH>..<HASH> 100644
--- a/gulpfile.js
+++ b/gulpfile.js
@@ -104,7 +104,7 @@ var amigo_url = a['AMIGO_DYNAMIC_URL'].value;
var golr_private_url = a['AMIGO_PRIVATE_GOLR_URL'].value;
var owltools_max_memory = a['OWLTOOLS_MAX_MEMORY'].value || '4G';
var owltools_runner = 'java -Xms2048M -DentityExpansionLimit=4086000 -Djava.awt.headless=true -Xmx' + owltools_max_memory + ' -jar ./java/lib/owltools-runner-all.jar';
-var owltools_ops_flags = '--merge-support-ontologies --remove-subset-entities upperlevel --remove-disjoints --reasoner elk --silence-elk';
+var owltools_ops_flags = '--merge-support-ontologies --remove-subset-entities upperlevel --remove-disjoints --silence-elk --reasoner elk';
var metadata_list = _tilde_expand_list(a['GOLR_METADATA_LIST'].value);
var metadata_string = metadata_list.join(' ');
var ontology_metadata = tilde(a['GOLR_METADATA_ONTOLOGY_LOCATION'].value); | order changed on comment from heiko | geneontology_amigo | train | js |
d29f46af9445527ef562c4c50d5cb8027e464d7f | diff --git a/item.go b/item.go
index <HASH>..<HASH> 100644
--- a/item.go
+++ b/item.go
@@ -98,6 +98,10 @@ func (item *Item) Len() int {
return item.length
}
+func (item *Item) Space() int {
+ return len(item.bytes) - item.length
+}
+
func (item *Item) TrimLastIf(b byte) bool {
l := item.Len() - 1
if l == -1 || item.bytes[l] != b {
diff --git a/item_test.go b/item_test.go
index <HASH>..<HASH> 100644
--- a/item_test.go
+++ b/item_test.go
@@ -164,3 +164,12 @@ func (i *ItemTests) CloneDetachesTheObject() {
item.Raw()[0] = '!'
Expect(actual[0]).To.Equal(byte('o'))
}
+
+func (i *ItemTests) ReturnsTheAvailableSpace() {
+ item := NewItem(10, nil)
+ Expect(item.Space()).To.Equal(10)
+ item.WriteString("hello")
+ Expect(item.Space()).To.Equal(5)
+ item.WriteString("world")
+ Expect(item.Space()).To.Equal(0)
+} | Can call item.Space() to see how much space is left. | karlseguin_bytepool | train | go,go |
Subsets and Splits
Java Commits in Train Set
Queries for all entries where the diff_languages column is 'java', providing a filtered dataset but without deeper analysis.
Java Commits Test Data
Returns a subset of 5000 entries from the dataset where the programming language difference is Java, providing basic filtering for exploration.
Java Commits Sample
Retrieves the first 1,000 records where the 'diff_languages' column is 'java', providing limited insight into the specific data entries.
Java Commits Validation Sample
Retrieves a sample of entries from the validation dataset where the 'diff_languages' column is 'java', providing limited insight into specific Java-related data points.
Java Commits in Validation
This query retrieves a limited sample of entries from the validation dataset where the programming language difference is Java, providing basic filtering with minimal insight.
Java Commits Sample
This query retrieves a sample of 100 records where the 'diff_languages' is 'java', providing basic filtering but limited analytical value.
Java Commits Sample
Retrieves 100 samples where the language difference is Java, providing basic filtering but minimal analytical value.
Java Commits Sample
Retrieves 10 samples where the diff_languages column is 'java', providing basic examples of data entries with this specific language.
Java Commits Validation Sample
Retrieves 1,000 records where the 'diff_languages' column is 'java', providing a snapshot of that specific subset but limited to raw data.
Java Commits Sample
This query retrieves 1000 random samples from the dataset where the 'diff_languages' column is 'java', offering limited insight beyond raw data.