hash
stringlengths 40
40
| diff
stringlengths 131
114k
| message
stringlengths 7
980
| project
stringlengths 5
67
| split
stringclasses 1
value |
|---|---|---|---|---|
2458e5b7afe3deb9521269f991db574ea0f3b6a1
|
diff --git a/lib/griffin/server_config_builder.rb b/lib/griffin/server_config_builder.rb
index <HASH>..<HASH> 100644
--- a/lib/griffin/server_config_builder.rb
+++ b/lib/griffin/server_config_builder.rb
@@ -33,13 +33,14 @@ module Griffin
port: 50051,
pool_size: DEFAULT_POOL_SIZE,
interceptors: [],
+ services: [],
}.freeze
def initialize
@opts = DEFAULT_SERVER_CONFIG.dup
end
- (SERVERENGINE_PRIMITIVE_CONFIGS + GRIFFIN_CONFIGS + [:interceptors]).each do |name|
+ (SERVERENGINE_PRIMITIVE_CONFIGS + GRIFFIN_CONFIGS).each do |name|
define_method(name) do |value|
@opts[name] = value
end
@@ -51,8 +52,12 @@ module Griffin
end
end
- def services(serv, *rest)
- @opts[:services] = Array(serv) + rest
+ def interceptors(*value)
+ @opts[:interceptors].concat(value).flatten!
+ end
+
+ def services(*value)
+ @opts[:services].concat(value).flatten!
end
def build
|
services and interceptors are able to be add item many times
|
cookpad_griffin
|
train
|
5bd159051949e483f63be44567a911b028c9b4fb
|
diff --git a/app/services/socializer/add_default_circles.rb b/app/services/socializer/add_default_circles.rb
index <HASH>..<HASH> 100644
--- a/app/services/socializer/add_default_circles.rb
+++ b/app/services/socializer/add_default_circles.rb
@@ -69,22 +69,22 @@ module Socializer
def acquaintances_content
"A good place to stick people you've met but " \
- "aren't particularly close to."
+ "aren't particularly close to."
end
def family_content
"Your close and extended family, with as " \
- "many or as few in-laws as you want."
+ "many or as few in-laws as you want."
end
def following_content
"People you don't know personally, but whose " \
- "posts you find interesting."
+ "posts you find interesting."
end
def friends_content
"Your real friends, the ones you feel " \
- "comfortable sharing private details with."
+ "comfortable sharing private details with."
end
end
end
diff --git a/spec/decorators/socializer/person/address_decorator_spec.rb b/spec/decorators/socializer/person/address_decorator_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/decorators/socializer/person/address_decorator_spec.rb
+++ b/spec/decorators/socializer/person/address_decorator_spec.rb
@@ -12,7 +12,7 @@ module Socializer
context "with no line2" do
let(:address_value) do
"282 Kevin Brook<br>" \
- "Imogeneborough, California 58517<br>US"
+ "Imogeneborough, California 58517<br>US"
end
specify do
@@ -25,8 +25,8 @@ module Socializer
let(:address_value) do
"282 Kevin Brook<br>" \
- "Apt. 123<br>Imogeneborough, California 58517<br>" \
- "US"
+ "Apt. 123<br>Imogeneborough, California 58517<br>" \
+ "US"
end
specify do
diff --git a/spec/decorators/socializer/person/education_decorator_spec.rb b/spec/decorators/socializer/person/education_decorator_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/decorators/socializer/person/education_decorator_spec.rb
+++ b/spec/decorators/socializer/person/education_decorator_spec.rb
@@ -25,8 +25,8 @@ module Socializer
context "with major_or_field_of_study" do
let(:education_value) do
"Hard Knocks<br>" \
- "Slacking<br>" \
- "#{decorated_education.started_on_to_ended_on}"
+ "Slacking<br>" \
+ "#{decorated_education.started_on_to_ended_on}"
end
specify do
@@ -42,7 +42,7 @@ module Socializer
let(:education_value) do
"Hard Knocks<br>" \
- "#{decorated_education.started_on_to_ended_on}"
+ "#{decorated_education.started_on_to_ended_on}"
end
specify do
diff --git a/spec/decorators/socializer/person/employment_decorator_spec.rb b/spec/decorators/socializer/person/employment_decorator_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/decorators/socializer/person/employment_decorator_spec.rb
+++ b/spec/decorators/socializer/person/employment_decorator_spec.rb
@@ -25,7 +25,7 @@ module Socializer
context "with no job_title or job_description" do
let(:employment_value) do
"Some Company<br>" \
- "#{decorated_employment.started_on_to_ended_on}"
+ "#{decorated_employment.started_on_to_ended_on}"
end
specify do
@@ -39,8 +39,8 @@ module Socializer
let(:employment_value) do
"Some Company<br>" \
- "My Title<br>" \
- "#{decorated_employment.started_on_to_ended_on}"
+ "My Title<br>" \
+ "#{decorated_employment.started_on_to_ended_on}"
end
specify do
@@ -56,8 +56,8 @@ module Socializer
let(:employment_value) do
"Some Company<br>" \
- "Description<br>" \
- "#{decorated_employment.started_on_to_ended_on}"
+ "Description<br>" \
+ "#{decorated_employment.started_on_to_ended_on}"
end
specify do
@@ -74,9 +74,9 @@ module Socializer
let(:employment_value) do
"Some Company<br>" \
- "My Title<br>" \
- "Description<br>" \
- "#{decorated_employment.started_on_to_ended_on}"
+ "My Title<br>" \
+ "Description<br>" \
+ "#{decorated_employment.started_on_to_ended_on}"
end
specify do
|
Layout/LineEndStringConcatenationIndentation: Indent the first part of a string concatenated with backslash.
|
socializer_socializer
|
train
|
5c0266d28bc7da7717db5d35ebfee7532a71c178
|
diff --git a/pyemma/coordinates/clustering/uniform_time.py b/pyemma/coordinates/clustering/uniform_time.py
index <HASH>..<HASH> 100644
--- a/pyemma/coordinates/clustering/uniform_time.py
+++ b/pyemma/coordinates/clustering/uniform_time.py
@@ -98,7 +98,7 @@ class UniformTimeClustering(AbstractClustering):
if itraj != last_itraj:
if last_itraj != -1:
- self._tprev += iterator.trajectory_length(last_itraj, stride=stride)
+ self._tprev += self.trajectory_length(itraj=last_itraj, stride=stride)
last_itraj = itraj
t = 0
|
[clustering/uniform_time] fix tprev length assignment
|
markovmodel_PyEMMA
|
train
|
2108a54fcb6287113bb7ceb13e4cbbb8933bdb02
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -8,16 +8,18 @@ SOURCE_DIR = os.path.join(ROOT_DIR)
test_requirements = []
setup(
name="docker-py",
- version='0.1.5',
+ version='0.2.0',
description="Python client for Docker.",
packages=['docker'],
install_requires=['requests', 'six'] + test_requirements,
zip_safe=False,
test_suite='tests',
- classifiers=['Development Status :: 3 - Alpha',
+ classifiers=['Development Status :: 4 - Beta',
'Environment :: Other Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
- 'Topic :: Utilities'],
+ 'Topic :: Utilities',
+ 'License :: OSI Approved :: Apache Software License'
+],
)
|
Bumped version in setup.py and changed development status from alpha to beta
|
docker_docker-py
|
train
|
0a05be85c860aa9dccd68df09d7b3a5668866e61
|
diff --git a/lib/index.js b/lib/index.js
index <HASH>..<HASH> 100644
--- a/lib/index.js
+++ b/lib/index.js
@@ -225,6 +225,17 @@ exports.createAPI = function (options, callback) {
return callback(null, docs);
});
};
+ hoodie.user.remove = function (username, callback) {
+ var _id = 'org.couchdb.user:' + username;
+ var url = hoodie._resolve('/_users/' + uc(_id));
+ hoodie.request('GET', url, function (err, doc) {
+ if (err) {
+ return callback(err);
+ }
+ var opt = {data: {rev: doc._rev}};
+ hoodie.request('DELETE', url, opt, callback);
+ });
+ };
// stops listening to changes feeds etc.
hoodie._stop = function (callback) {
diff --git a/test/test-plugin-api.js b/test/test-plugin-api.js
index <HASH>..<HASH> 100644
--- a/test/test-plugin-api.js
+++ b/test/test-plugin-api.js
@@ -378,12 +378,14 @@ exports['automatically update app config from couch'] = function (test) {
});
};
-exports['user.add / user.list'] = function (test) {
+exports['user.add / user.list / user.remove'] = function (test) {
var hoodie = this.hoodie;
async.series([
hoodie.user.findAll,
async.apply(hoodie.user.add, 'testuser', 'testing'),
hoodie.user.findAll,
+ async.apply(hoodie.user.remove, 'testuser'),
+ hoodie.user.findAll
],
function (err, results) {
if (err) {
@@ -391,9 +393,11 @@ exports['user.add / user.list'] = function (test) {
}
var docs1 = results[0];
var docs2 = results[2];
+ var docs3 = results[4];
test.equal(docs1.length, 0);
test.equal(docs2.length, 1);
test.equal(docs2[0].name, 'testuser');
+ test.equal(docs3.length, 0);
test.done();
});
};
|
added user.remove method
This commit was sponsored by The Hoodie Firm
with support from NLnet: <URL>
|
hoodiehq-archive_hoodie-plugins-api
|
train
|
82049667a356cf360d5c6eb239caed83c6d67b31
|
diff --git a/aeron-driver/src/main/java/io/aeron/driver/PublicationImage.java b/aeron-driver/src/main/java/io/aeron/driver/PublicationImage.java
index <HASH>..<HASH> 100644
--- a/aeron-driver/src/main/java/io/aeron/driver/PublicationImage.java
+++ b/aeron-driver/src/main/java/io/aeron/driver/PublicationImage.java
@@ -358,8 +358,9 @@ public class PublicationImage
}
else if (null != lossReport)
{
- reportEntry = lossReport.createEntry(
- length, cachedEpochClock.time(), sessionId, streamId, channel(), sourceAddress.toString());
+ final String source = Configuration.sourceIdentity(sourceAddress);
+ final long timeMs = cachedEpochClock.time();
+ reportEntry = lossReport.createEntry(length, timeMs, sessionId, streamId, channel(), source);
if (null == reportEntry)
{
|
[Java] Use standard way of reporting source identity.
|
real-logic_aeron
|
train
|
b901a32217106beb94721e2363e18abecd2acf82
|
diff --git a/tests/test_pandas_dataset.py b/tests/test_pandas_dataset.py
index <HASH>..<HASH> 100644
--- a/tests/test_pandas_dataset.py
+++ b/tests/test_pandas_dataset.py
@@ -751,7 +751,6 @@ class TestPandasDataset(unittest.TestCase):
'y' : [5.0, 5],
'z' : [0, 10],
'n' : [0, None],
- 's' : ['s', np.nan],
'b' : [True, False],
})
@@ -802,10 +801,13 @@ class TestPandasDataset(unittest.TestCase):
'out':{'success':True, 'true_value':0.5}}
]
- for t in T:
+ for t in T[1:]:
out = typedf.expect_column_mean_to_be_between(**t['in'])
self.assertEqual(out, t['out'])
+ with self.assertRaises(TypeError):
+ typedf.expect_column_mean_to_be_between(T[0]['in'])
+
def test_expect_column_stdev_to_be_between(self):
|
reduced a TypeError in expect_column_mean_to_be_between
|
great-expectations_great_expectations
|
train
|
4b4294eb6c0fac64f92c54290c9db4826a9c4ad1
|
diff --git a/epic2cf/data.py b/epic2cf/data.py
index <HASH>..<HASH> 100644
--- a/epic2cf/data.py
+++ b/epic2cf/data.py
@@ -308,10 +308,10 @@ epic_map = {
902 : dict(standard_name=None, long_name=None, units=None, convert=lambda x: x, cf_units=None, cell_methods=None), # Irr:Irradiance(CTD) : :uEin cm-2 s-1: :,
903 : dict(standard_name='mass_concentration_of_chlorophyll_in_sea_water', long_name='Chlorophyll', units='mg/m^3', convert=lambda x: x/1000000., cf_units='kg/m^3', cell_methods=None), # F :Fluorometer (CTD) : :mg m-3: :J.Murray 5/13/92,
904 : dict(standard_name=None, long_name=None, units=None, convert=lambda x: x, cf_units=None, cell_methods=None), # Tr :Transmissometry (CTD) : :%: :J.Murray 5/13/92,
- 905 : dict(standard_name='downwelling_photosynthetic_photon_flux_in_sea_water', long_name='Photosynthetic Active Radiation (PAR)', units='µE m-2 s-1', convert=lambda x: x, cf_units='mol m-2 s-1', cell_methods=None), # PAR:Photosynthetic Active Radiation:par:uEin m-2 s-1: :J.Murray 5/13/92,
+ 905 : dict(standard_name='downwelling_photosynthetic_radiative_flux_in_sea_water', long_name='Photosynthetic Active Radiation (PAR)', units='µE m-2 s-1', convert=lambda x: x, cf_units='mol m-2 s-1', cell_methods=None), # PAR:Photosynthetic Active Radiation:par:uEin m-2 s-1: :J.Murray 5/13/92,
906 : dict(standard_name='mass_concentration_of_chlorophyll_in_sea_water', long_name='Chlorophyll-a (fluorometric)', units='µg/L', convert=lambda x: x/1000000., cf_units='kg/m^3', cell_methods=None), # Fch:Chlorophyll A : :ugrams/l: :(fluorometric) J.Murray 5/9/94,
907 : dict(standard_name=None, long_name=None, units=None, convert=lambda x: x, cf_units=None, cell_methods=None), # Fph:Phaeopigments : :ugrams/l: :(fluorometric) J.Murray 5/9/94,
- 908 : dict(standard_name='downwelling_photosynthetic_photon_flux_in_sea_water', long_name='Photosynthetic Active Radiation (PAR)', units='µE cm-2 s-1', convert=lambda x: x/100000., cf_units='mol m-2 s-1', cell_methods=None), # PAR:Photosynthetic Active Radiation:par:uEin cm-2 s-1: :S.Salo 7/1/98,
+ 908 : dict(standard_name='downwelling_photosynthetic_radiative_flux_in_sea_water', long_name='Photosynthetic Active Radiation (PAR)', units='µE cm-2 s-1', convert=lambda x: x/100000., cf_units='mol m-2 s-1', cell_methods=None), # PAR:Photosynthetic Active Radiation:par:uEin cm-2 s-1: :S.Salo 7/1/98,
910 : dict(standard_name='relative_humidity', long_name='Relative Humidity', units='percent', convert=lambda x: x, cf_units='percent', cell_methods=None), # RH :RELATIVE HUMIDITY (%) :rh:%: :PERCENT,
911 : dict(standard_name='relative_humidity', long_name='Relative Humidity', units='percent', convert=lambda x: x, cf_units='percent', cell_methods='time: minimum'), # RH :RELATIVE HUMIDITY MIN (%):rh:%: :,
912 : dict(standard_name='relative_humidity', long_name='Relative Humidity', units='percent', convert=lambda x: x, cf_units='percent', cell_methods='time: maximum'), # RH :RELATIVE HUMIDITY MAX (%):rh:%: :,
|
Fix the PAR standard_names (<I> and <I>)
|
axiom-data-science_epic2cf
|
train
|
de94c59511097e0267ecd2ab7929196106dfcaab
|
diff --git a/public/js/actions/sources.js b/public/js/actions/sources.js
index <HASH>..<HASH> 100644
--- a/public/js/actions/sources.js
+++ b/public/js/actions/sources.js
@@ -136,6 +136,8 @@ function selectSource(id: string, options: SelectSourceOptions = {}) {
// Make sure to start a request to load the source text.
dispatch(loadSourceText(source));
+ dispatch({ type: constants.TOGGLE_FILE_SEARCH, searchOn: false });
+
dispatch({
type: constants.SELECT_SOURCE,
source: source,
|
close source search when a source is selected
|
firefox-devtools_debugger
|
train
|
650b494ca56c3af9306d9153374fd9aa76f5e57c
|
diff --git a/models/fallahi_eval/assemble_pysb.py b/models/fallahi_eval/assemble_pysb.py
index <HASH>..<HASH> 100644
--- a/models/fallahi_eval/assemble_pysb.py
+++ b/models/fallahi_eval/assemble_pysb.py
@@ -5,10 +5,12 @@ from pysb.integrate import Solver
from indra.statements import *
from indra.mechlinker import MechLinker
import indra.tools.assemble_corpus as ac
-from indra.databases import context_client, cbio_client
+from indra.databases import context_client, cbio_client, hgnc_client, \
+ uniprot_client
from indra.assemblers import PysbAssembler, IndexCardAssembler
from util import prefixed_pkl, pklload
-from process_data import antibody_map, cell_lines, read_ccle_variants
+from process_data import antibody_map, cell_lines, read_ccle_variants, \
+ drug_targets, drug_grounding, agent_from_gene_name
def assemble_pysb(stmts, data_genes, contextualize=False):
# Filter the INDRA Statements to be put into the model
@@ -50,6 +52,11 @@ def assemble_pysb(stmts, data_genes, contextualize=False):
# Save the Statements here
ac.dump_statements(stmts, prefixed_pkl('pysb_stmts'))
+
+ # Add drug target Statements
+ drug_target_stmts = get_drug_target_statements()
+ stmts += drug_target_stmts
+
# Just generate the generic model
pa = PysbAssembler()
pa.add_statements(stmts)
@@ -77,6 +84,18 @@ def assemble_pysb(stmts, data_genes, contextualize=False):
pickle.dump(model, f)
+def get_drug_target_statements():
+ stmts = []
+ for drug, targets in drug_targets.items():
+ for target in targets:
+ target_agent = agent_from_gene_name(target)
+ drug_agent = Agent(drug, db_refs=drug_grounding[drug])
+ st = DecreaseAmount(drug_agent, target_agent)
+ stmts.append(st)
+ return stmts
+
+
+
def contextualize_stmts(stmts, cell_line, genes):
"""Contextualize model at the level of INDRA Statements."""
to_remove = []
|
Add drug-target statements to PySB assembly
|
sorgerlab_indra
|
train
|
d4ca79b6be10a4d983357f91a870c5fc5e79bbfb
|
diff --git a/lib/http_objects/version.rb b/lib/http_objects/version.rb
index <HASH>..<HASH> 100644
--- a/lib/http_objects/version.rb
+++ b/lib/http_objects/version.rb
@@ -1,3 +1,3 @@
module HttpObjects
- VERSION = "0.0.3"
+ VERSION = "0.0.4pre"
end
|
Bumping to version <I>pre
|
rogerleite_http_objects
|
train
|
86482642be2cec18dd9c1d4ab92f93b7d44131ae
|
diff --git a/nfc/tag/__init__.py b/nfc/tag/__init__.py
index <HASH>..<HASH> 100644
--- a/nfc/tag/__init__.py
+++ b/nfc/tag/__init__.py
@@ -162,9 +162,10 @@ class Tag(object):
"""
import nfc.ndef
-
- if len(self._data) > 3:
- try: return nfc.ndef.Message(str(self._data))
+
+ if len(self.octets) > 3:
+ try:
+ return nfc.ndef.Message(self.octets)
except nfc.ndef.parser_error as error:
log.error(repr(error))
@@ -173,11 +174,33 @@ class Tag(object):
@message.setter
def message(self, msg):
+ self.octets = bytes(msg)
+
+ @property
+ def octets(self):
+ """Read or write NDEF message data octets.
+
+ .. versionadded:: 0.12
+
+ The *octets* attribute returns the NDEF message data
+ octets as bytes. A bytes or bytearray sequence assigned to
+ *octets* is immediately written to the NDEF message data
+ area, unless the Tag memory is write protected or to
+ small. ::
+
+ if tag.ndef is not None:
+ print(hexlify(tag.ndef.octets))
+
+ """
+ return bytes(self._data)
+
+ @octets.setter
+ def octets(self, data):
if not self._writeable:
- raise AttributeError("ndef message is not writeable")
- data = bytearray(str(msg))
+ raise AttributeError("tag ndef area is not writeable")
+ data = bytearray(data)
if len(data) > self.capacity:
- raise ValueError("ndef message size exceeds capacity")
+ raise ValueError("data length exceeds tag capacity")
self._write_ndef_data(data)
self._data = data
|
new Tag.ndef.octets attribute to prepare transition to ndeflib
|
nfcpy_nfcpy
|
train
|
82dd699eb5f6e0e13102d9c5af5916338f9f9604
|
diff --git a/expr/func_avgseries.go b/expr/func_avgseries.go
index <HASH>..<HASH> 100644
--- a/expr/func_avgseries.go
+++ b/expr/func_avgseries.go
@@ -35,6 +35,11 @@ func (s *FuncAvgSeries) Exec(cache map[Req][]models.Series) ([]models.Series, er
}
series = append(series, in...)
}
+
+ if len(series) == 0 {
+ return series, nil
+ }
+
if len(series) == 1 {
name := fmt.Sprintf("averageSeries(%s)", series[0].QueryPatt)
series[0].Target = name
diff --git a/expr/func_sumseries.go b/expr/func_sumseries.go
index <HASH>..<HASH> 100644
--- a/expr/func_sumseries.go
+++ b/expr/func_sumseries.go
@@ -36,6 +36,10 @@ func (s *FuncSumSeries) Exec(cache map[Req][]models.Series) ([]models.Series, er
series = append(series, in...)
}
+ if len(series) == 0 {
+ return series, nil
+ }
+
if len(series) == 1 {
name := fmt.Sprintf("sumSeries(%s)", series[0].QueryPatt)
series[0].Target = name
|
make sum/avg gracefully handle empty lists
|
grafana_metrictank
|
train
|
53864a67c4cc56593dd0166e90270edea94355da
|
diff --git a/kuyruk/kuyruk.py b/kuyruk/kuyruk.py
index <HASH>..<HASH> 100644
--- a/kuyruk/kuyruk.py
+++ b/kuyruk/kuyruk.py
@@ -62,7 +62,7 @@ class Kuyruk:
yield ch
@contextmanager
- def connection(self) -> Iterator[amqp.Connection]:
+ def connection(self, vhost: str = None, user: str = None, password: str = None) -> Iterator[amqp.Connection]:
"""Returns a new connection as a context manager."""
TCP_USER_TIMEOUT = 18 # constant is available on Python 3.6+.
socket_settings = {TCP_USER_TIMEOUT: self.config.TCP_USER_TIMEOUT}
@@ -72,9 +72,9 @@ class Kuyruk:
conn = amqp.Connection(
host="%s:%s" % (self.config.RABBIT_HOST, self.config.RABBIT_PORT),
- userid=self.config.RABBIT_USER,
- password=self.config.RABBIT_PASSWORD,
- virtual_host=self.config.RABBIT_VIRTUAL_HOST,
+ userid=user or self.config.RABBIT_USER,
+ password=password or self.config.RABBIT_PASSWORD,
+ virtual_host=vhost or self.config.RABBIT_VIRTUAL_HOST,
connect_timeout=self.config.RABBIT_CONNECT_TIMEOUT,
read_timeout=self.config.RABBIT_READ_TIMEOUT,
write_timeout=self.config.RABBIT_WRITE_TIMEOUT,
|
allow overriding vhost, user and password when connecting
|
cenkalti_kuyruk
|
train
|
6392596b137baab59cee3ed4f8c408fe108a8a86
|
diff --git a/menuconfig.py b/menuconfig.py
index <HASH>..<HASH> 100755
--- a/menuconfig.py
+++ b/menuconfig.py
@@ -130,8 +130,9 @@ _N_SCROLL_ARROWS = 14
# Lines of help text shown at the bottom of the "main" display
_MAIN_HELP_LINES = """
-[Space/Enter] Toggle/enter [ESC] Leave menu [S] Save [O] Load
-[?] Symbol info [/] Jump to symbol [A] Toggle show-all mode
+[Space/Enter] Toggle/enter [ESC] Leave menu [S] Save
+[O] Load [?] Symbol info [/] Jump to symbol
+[A] Toggle show-all mode [C] Toggle show-name mode
[Q] Quit (prompts for save) [D] Save minimal config (advanced)
"""[1:-1].split("\n")
@@ -368,6 +369,9 @@ def menuconfig(kconf):
#
# Invisible items are drawn in a different style to make them stand out.
#
+# _show_name:
+# If True, the names of all symbol are shown in addition to the prompt.
+#
# _conf_changed:
# True if the configuration has been changed. If False, we don't bother
# showing the save-and-quit dialog.
@@ -380,6 +384,7 @@ def _menuconfig(stdscr):
globals()["stdscr"] = stdscr
global _conf_changed
+ global _show_name
_init()
@@ -494,6 +499,9 @@ def _menuconfig(stdscr):
elif c in ("a", "A"):
_toggle_show_all()
+ elif c in ("c", "C"):
+ _show_name = not _show_name
+
elif c in ("q", "Q"):
res = quit_dialog()
if res:
@@ -543,6 +551,8 @@ def _init():
global _sel_node_i
global _menu_scroll
+ global _show_name
+
global _conf_changed
# Looking for this in addition to KEY_BACKSPACE (which is unreliable) makes
@@ -581,11 +591,14 @@ def _init():
_parent_screen_rows = []
# Initial state
+
_cur_menu = _kconf.top_node
_shown = _shown_nodes(_cur_menu)
_sel_node_i = 0
_menu_scroll = 0
+ _show_name = False
+
# Give windows their initial size
_resize_main()
@@ -918,9 +931,14 @@ def _draw_main():
if _menu_scroll < _max_scroll(_shown, _menu_win):
_safe_hline(_bot_sep_win, 0, 4, curses.ACS_DARROW, _N_SCROLL_ARROWS)
- # Indicate when show-all mode is enabled
+ # Indicate when show-all and/or show-name mode is enabled
+ enabled_modes = []
if _show_all:
- s = "Show-all mode enabled"
+ enabled_modes.append("show-all")
+ if _show_name:
+ enabled_modes.append("show-name")
+ if enabled_modes:
+ s = " and ".join(enabled_modes) + " mode enabled"
_safe_addstr(_bot_sep_win, 0, term_width - len(s) - 2, s)
_bot_sep_win.noutrefresh()
@@ -2159,14 +2177,17 @@ def _node_str(node):
parent = parent.parent
# This approach gives nice alignment for empty string symbols ("() Foo")
- s = "{:{}} ".format(_value_str(node), 3 + indent)
+ s = "{:{}}".format(_value_str(node), 3 + indent)
- if not node.prompt:
- # Show the symbol/choice name in <> brackets if it has no prompt. This
- # path can only hit in show-all mode.
- s += "<{}>".format(node.item.name)
+ # 'not node.prompt' can only be True in show-all mode
+ if (not node.prompt or _show_name) and \
+ (isinstance(node.item, Symbol) or
+ (isinstance(node.item, Choice) and node.item.name)):
- else:
+ s += " <{}>".format(node.item.name)
+
+ if node.prompt:
+ s += " "
if node.item == COMMENT:
s += "*** {} ***".format(node.prompt[0])
else:
|
menuconfig: Add mode for showing all symbol names
Pressing 'c' toggles show-name mode, where names of symbols are
displayed before their prompt.
|
ulfalizer_Kconfiglib
|
train
|
c1ba82ab8091a6f782d815ec0fc61dc23eecaaa4
|
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -7,7 +7,8 @@ var core = {};
// load core modules from builtin dir
fs.readdirSync(path.resolve(__dirname, 'builtin')).forEach(function(file) {
- core[path.basename(file, '.js')] = path.resolve(__dirname, 'builtin', file);
+ if (file[0] === '_') return;
+ core[path.basename(file, '.js')] = path.resolve(__dirname, 'builtin', file);
});
// manually resolve modules that would otherwise resolve as core
diff --git a/test/node/node-test.js b/test/node/node-test.js
index <HASH>..<HASH> 100644
--- a/test/node/node-test.js
+++ b/test/node/node-test.js
@@ -7,19 +7,25 @@ test('test that all the modules are set', function (t) {
'assert',
'buffer',
'child_process',
+ 'cluster',
'console',
'constants',
'crypto',
'dgram',
+ 'dns',
+ 'domain',
'events',
'fs',
'http',
'https',
'net',
+ 'os',
'path',
'process',
'punycode',
'querystring',
+ 'readline',
+ 'repl',
'stream',
'string_decoder',
'sys',
|
[fix] don't include internal files in module object
|
alexgorbatchev_node-browser-builtins
|
train
|
0094bb58c34b6eb83a08060881726b7d514c3483
|
diff --git a/src/db/clients/sqlserver.js b/src/db/clients/sqlserver.js
index <HASH>..<HASH> 100644
--- a/src/db/clients/sqlserver.js
+++ b/src/db/clients/sqlserver.js
@@ -59,7 +59,7 @@ const executePromiseQuery = (connection, query) => new Promise(async (resolve, r
if (isSelect) {
resolve({
rows: recordSet,
- fields: Object.keys(recordSet[0] || {}).map(k => ({name: k})),
+ fields: Object.keys(recordSet[0] || {}).map(name => ({ name })),
rowCount: recordSet.length,
affectedRows: undefined,
});
|
Use a better name for property in the map function
|
sqlectron_sqlectron-core
|
train
|
4c2a04ee16ec0ca7a8abebb33757270a02345f74
|
diff --git a/mod/quiz/format.php b/mod/quiz/format.php
index <HASH>..<HASH> 100644
--- a/mod/quiz/format.php
+++ b/mod/quiz/format.php
@@ -6,7 +6,7 @@
/// Doesn't do everything on it's own -- it needs to be extended. //
////////////////////////////////////////////////////////////////////
-// Included by import.php
+// Included by import.php and export.php
class quiz_default_format {
@@ -142,6 +142,80 @@ class quiz_default_format {
return true;
}
+// Export functions
+
+
+ function exportpreprocess($category) {
+ /// Does any pre-processing that may be desired
+
+ $this->category = $category; // Important
+
+ return true;
+ }
+
+ function exportprocess($filename) {
+ /// Exports a given category. There's probably little need to change this
+
+ global $CFG;
+
+ // create a directory for the exports (if not already existing)
+ $dirname = get_string("exportfilename","quiz");
+ $courseid = $this->category->course;
+ $path = $CFG->dataroot.'/'.$courseid.'/'.$dirname;
+ if (!is_dir($path)) {
+ if (!mkdir($path, $CFG->directorypermissions)) {
+ error("Cannot create path: $path");
+ }
+ }
+
+ // get the questions (from database) in this category
+ // $questions = get_records("quiz_questions","category",$this->category->id);
+ $questions = get_questions_category( $this->category );
+
+ notify("Exporting ".count($questions)." questions.");
+ $count = 0;
+
+ // results are first written into string (and then to a file)
+ // so create/initialize the string here
+ $expout = "";
+
+ // iterate through questions
+ foreach($questions as $question) {
+ $count++;
+ echo "<hr><p><b>$count</b>. ".stripslashes($question->questiontext)."</p>";
+ $expout .= $this->writequestion( $question );
+ }
+
+ // write file
+ $filepath = $path."/".$filename;
+ if (!$fh=fopen($filepath,"w")) {
+ error("Cannot open for writing: $filepath");
+ }
+ if (!fwrite($fh, $expout)) {
+ error("Cannot write exported questions to $filepath");
+ }
+ fclose($fh);
+
+ return true;
+ }
+
+ function exportpostprocess() {
+ /// Does any post-processing that may be desired
+ /// Argument is a simple array of question ids that
+ /// have just been added.
+
+ return true;
+ }
+
+ function writequestion($question) {
+ /// Turns a question object into textual output in the given format
+ /// must be overidden
+
+ echo "<p>This quiz format has not yet been completed!</p>";
+
+ return NULL;
+ }
+
}
?>
|
Added 'virtual' method for writequestion
|
moodle_moodle
|
train
|
b1be0b5d0c1147d2ee1bb4175dfb232b1e058685
|
diff --git a/lib/svtplay_dl/service/picsearch.py b/lib/svtplay_dl/service/picsearch.py
index <HASH>..<HASH> 100644
--- a/lib/svtplay_dl/service/picsearch.py
+++ b/lib/svtplay_dl/service/picsearch.py
@@ -21,15 +21,19 @@ class Picsearch(Service, OpenGraphThumbMixin):
ajax_auth = re.search(r"picsearch_ajax_auth = '(\w+)'", data)
if not ajax_auth:
- log.error("Cant find token for video")
- return
+ ajax_auth = re.search(r'screen9-ajax-auth="([^"]+)"', data)
+ if not ajax_auth:
+ log.error("Cant find token for video")
+ return
mediaid = re.search(r"mediaId = '([^']+)';", self.get_urldata())
if not mediaid:
mediaid = re.search(r'media-id="([^"]+)"', self.get_urldata())
if not mediaid:
- log.error("Cant find media id")
- return
- jsondata = self.http.request("get", "http://csp.picsearch.com/rest?jsonp=&eventParam=1&auth=%s&method=embed&mediaid=%s" % (ajax_auth.group(1), mediaid.group(1))).content
+ mediaid = re.search(r'screen9-mid="([^"]+)"', self.get_urldata())
+ if not mediaid:
+ log.error("Cant find media id")
+ return
+ jsondata = self.http.request("get", "http://csp.picsearch.com/rest?jsonp=&eventParam=1&auth=%s&method=embed&mediaid=%s" % (ajax_auth.group(1), mediaid.group(1))).text
jsondata = json.loads(jsondata)
playlist = jsondata["media"]["playerconfig"]["playlist"][1]
if "bitrates" in playlist:
|
picsearch: support for screen9 videos
|
spaam_svtplay-dl
|
train
|
f84b19748d7d0dfda496b73ab365a2e64b377696
|
diff --git a/tests/languages/python_test.py b/tests/languages/python_test.py
index <HASH>..<HASH> 100644
--- a/tests/languages/python_test.py
+++ b/tests/languages/python_test.py
@@ -7,7 +7,6 @@ import sys
import mock
import pytest
-from pre_commit import parse_shebang
from pre_commit.languages import python
@@ -45,12 +44,7 @@ def test_sys_executable_matches_does_not_match(v):
),
)
def test_find_by_sys_executable(exe, realpath, expected):
- def mocked_find_executable(exe):
- return exe.rpartition('/')[2]
with mock.patch.object(sys, 'executable', exe):
with mock.patch.object(os.path, 'realpath', return_value=realpath):
- with mock.patch.object(
- parse_shebang, 'find_executable',
- side_effect=mocked_find_executable,
- ):
+ with mock.patch.object(python, 'find_executable', lambda x: x):
assert python._find_by_sys_executable() == expected
|
Patch the correct find_executable
|
pre-commit_pre-commit
|
train
|
83b3d11ddc02e829a1b393e9acd9045fab0b777f
|
diff --git a/src/com/esotericsoftware/kryo/util/Util.java b/src/com/esotericsoftware/kryo/util/Util.java
index <HASH>..<HASH> 100644
--- a/src/com/esotericsoftware/kryo/util/Util.java
+++ b/src/com/esotericsoftware/kryo/util/Util.java
@@ -33,15 +33,19 @@ import java.lang.reflect.Type;
public class Util {
static public final boolean isAndroid = "Dalvik".equals(System.getProperty("java.vm.name"));
- /** True if Unsafe is available. */
+ /** True if Unsafe is available. Unsafe can be disabled by setting the system property "kryo.unsafe" to "false". */
static public final boolean unsafe;
static {
boolean found = false;
- try {
- found = Class.forName("com.esotericsoftware.kryo.unsafe.UnsafeUtil", true, FieldSerializer.class.getClassLoader())
- .getField("unsafe").get(null) != null;
- } catch (Throwable ex) {
- if (TRACE) trace("kryo", "Unsafe is unavailable.", ex);
+ if ("false".equals(System.getProperty("kryo.unsafe"))) {
+ if (TRACE) trace("kryo", "Unsafe is disabled.");
+ } else {
+ try {
+ found = Class.forName("com.esotericsoftware.kryo.unsafe.UnsafeUtil", true, FieldSerializer.class.getClassLoader())
+ .getField("unsafe").get(null) != null;
+ } catch (Throwable ex) {
+ if (TRACE) trace("kryo", "Unsafe is unavailable.", ex);
+ }
}
unsafe = found;
}
|
Added the system property "kryo.unsafe" to disable accessing Unsafe.
closes #<I>
|
EsotericSoftware_kryo
|
train
|
8ac92bcdbc7a69d1aea7a55578ecf24cc0a7fcef
|
diff --git a/cell/lrp_test.go b/cell/lrp_test.go
index <HASH>..<HASH> 100644
--- a/cell/lrp_test.go
+++ b/cell/lrp_test.go
@@ -123,7 +123,7 @@ var _ = Describe("LRP", func() {
Context("when it's unhealthy for longer than its start timeout", func() {
BeforeEach(func() {
- lrp.StartTimeout = 5
+ lrp.StartTimeoutMs = 5000
lrp.Monitor = models.WrapAction(&models.RunAction{
User: "vcap",
diff --git a/cell/ssh_test.go b/cell/ssh_test.go
index <HASH>..<HASH> 100644
--- a/cell/ssh_test.go
+++ b/cell/ssh_test.go
@@ -127,11 +127,11 @@ var _ = Describe("SSH", func() {
Path: "nc",
Args: []string{"-z", "127.0.0.1", "3456"},
}),
- StartTimeout: 60,
- RootFs: "preloaded:" + helpers.PreloadedStacks[0],
- MemoryMb: 128,
- DiskMb: 128,
- Ports: []uint32{3456},
+ StartTimeoutMs: 60000,
+ RootFs: "preloaded:" + helpers.PreloadedStacks[0],
+ MemoryMb: 128,
+ DiskMb: 128,
+ Ports: []uint32{3456},
Routes: &models.Routes{
routes.DIEGO_SSH: &sshRouteMessage,
},
@@ -211,7 +211,7 @@ var _ = Describe("SSH", func() {
Context("when a bare-bones docker image is used as the root filesystem", func() {
BeforeEach(func() {
- lrp.StartTimeout = 120
+ lrp.StartTimeoutMs = 120000
lrp.RootFs = "docker:///cloudfoundry/diego-docker-app"
// busybox nc requires -p but ubuntu's won't allow it
|
Change Timeout to Milliseconds across diego components
[#<I>]
|
cloudfoundry_inigo
|
train
|
943161a480d06adef6b3e919956f2d874af7dc94
|
diff --git a/sem/manager.py b/sem/manager.py
index <HASH>..<HASH> 100644
--- a/sem/manager.py
+++ b/sem/manager.py
@@ -300,9 +300,14 @@ class CampaignManager(object):
next_runs = self.db.get_next_rngruns()
available_params = [r['params'] for r in self.db.get_results()]
for param_comb in param_list:
- needed_runs = runs - len([p for p in available_params if
- param_comb == {k: p[k] for k in
- p.keys() if k != "RngRun"}])
+ # Count how many param combinations we found, and remove them
+ # from the list of available_params for faster searching in the
+ # future
+ needed_runs = runs
+ for i, p in enumerate(available_params):
+ if param_comb == {k: p[k] for k in p.keys() if k != "RngRun"}:
+ needed_runs -= 1
+ del available_params[i]
new_param_combs = []
for needed_run in range(needed_runs):
# Here it's important that we make copies of the
|
Remove items from the search list as they are found
|
signetlabdei_sem
|
train
|
c7ab8fb08273165ae7d3309b85bae256bce68bf7
|
diff --git a/src/PhpDoc/TypeNodeResolver.php b/src/PhpDoc/TypeNodeResolver.php
index <HASH>..<HASH> 100644
--- a/src/PhpDoc/TypeNodeResolver.php
+++ b/src/PhpDoc/TypeNodeResolver.php
@@ -65,7 +65,7 @@ class TypeNodeResolver
public function getCacheKey(): string
{
- $key = 'v50';
+ $key = 'v51';
foreach ($this->extensions as $extension) {
$key .= sprintf('-%s', $extension->getCacheKey());
}
@@ -146,6 +146,7 @@ class TypeNodeResolver
new FloatType(),
new StringType(),
new BooleanType(),
+ new NullType(),
]);
case 'number':
|
null is also scalar
|
phpstan_phpstan
|
train
|
7d2b8f3ac98e11ab34e3395dd921984b8dd61e9a
|
diff --git a/activejob/test/cases/argument_serialization_test.rb b/activejob/test/cases/argument_serialization_test.rb
index <HASH>..<HASH> 100644
--- a/activejob/test/cases/argument_serialization_test.rb
+++ b/activejob/test/cases/argument_serialization_test.rb
@@ -12,7 +12,7 @@ class ArgumentSerializationTest < ActiveSupport::TestCase
end
[ nil, 1, 1.0, 1_000_000_000_000_000_000_000,
- "a", true, false, BigDecimal.new(5),
+ "a", true, false, BigDecimal(5),
:a, 1.day, Date.new(2001, 2, 3), Time.new(2002, 10, 31, 2, 2, 2, "+02:00"),
DateTime.new(2001, 2, 3, 4, 5, 6, "+03:00"),
ActiveSupport::TimeWithZone.new(Time.utc(1999, 12, 31, 23, 59, 59), ActiveSupport::TimeZone["UTC"]),
|
Fix "warning: BigDecimal.new is deprecated"
|
rails_rails
|
train
|
ca108e44b0e0d8a66de81c32b67b8fc28e7986d8
|
diff --git a/tgz_test.go b/tgz_test.go
index <HASH>..<HASH> 100644
--- a/tgz_test.go
+++ b/tgz_test.go
@@ -8,16 +8,8 @@ import (
"regexp"
"sort"
"testing"
-
- . "gopkg.in/check.v1"
)
-func Test(t *testing.T) { TestingT(t) }
-
-type SuiteTGZ struct{}
-
-var _ = Suite(&SuiteTGZ{})
-
func TestExtractError(t *testing.T) {
for i, test := range [...]struct {
tgz string
@@ -88,7 +80,7 @@ func TestExtract(t *testing.T) {
},
},
} {
- com := Commentf("%d) tgz path = %s", i, test.tgz)
+ com := fmt.Sprintf("%d) tgz path = %s", i, test.tgz)
path, err := Extract(test.tgz)
if err != nil {
|
no longer depends on go check, for real
|
alcortesm_tgz
|
train
|
c521d2fe6183abebe8a19de32968c11494ea13fb
|
diff --git a/addon/serializers/offline.js b/addon/serializers/offline.js
index <HASH>..<HASH> 100644
--- a/addon/serializers/offline.js
+++ b/addon/serializers/offline.js
@@ -65,17 +65,20 @@ export default DS.JSONSerializer.extend({
json[key] = `${value}`;
} else if (typeof value === 'undefined') {
json[key] = 'false';
+ } else {
+ this._super(snapshot, json, key, attribute);
}
break;
case 'decimal':
+
//Value should be a decimal number
if (typeof value === 'string') {
value = +(value.replace(',', '.'));
}
- if (isFinite(value)) {
+ if (isFinite(value) || typeof value === 'undefined') {
this._super(snapshot, json, key, attribute);
} else {
throw new Error(`Trying to save '${value}' value of '${key}' field of '${snapshot.modelName}' that should be a decimal`);
@@ -84,12 +87,13 @@ export default DS.JSONSerializer.extend({
break;
case 'number':
+
//Value should be a number
if (typeof value === 'string') {
value = +value;
}
- if (isFinite(value)) {
+ if (isFinite(value) || typeof value === 'undefined') {
this._super(snapshot, json, key, attribute);
} else {
throw new Error(`Trying to save '${value}' value of '${key}' field of '${snapshot.modelName}' that should be a number`);
|
Forgot about some cases
Numbers and decimals can be `undefined`. Booleans can be null.
|
Flexberry_ember-flexberry-data
|
train
|
db9ef08a8da60fe28b64bfa0136e7549d526f24d
|
diff --git a/actionview/test/template/digestor_test.rb b/actionview/test/template/digestor_test.rb
index <HASH>..<HASH> 100644
--- a/actionview/test/template/digestor_test.rb
+++ b/actionview/test/template/digestor_test.rb
@@ -17,8 +17,7 @@ class FixtureFinder < ActionView::LookupContext
FIXTURES_DIR = "#{File.dirname(__FILE__)}/../fixtures/digestor"
def initialize(details = {})
- prefixes = [FixtureFinder::FIXTURES_DIR]
- super(ActionView::PathSet.new(['digestor']), details, prefixes)
+ super(ActionView::PathSet.new(['digestor']), details, [])
end
end
|
the lookup context looks in the cwd, so prefix isn't necessary
|
rails_rails
|
train
|
0baba701cd42a75be7ba90080f3a3e7266fb45b5
|
diff --git a/fabric/connection.py b/fabric/connection.py
index <HASH>..<HASH> 100644
--- a/fabric/connection.py
+++ b/fabric/connection.py
@@ -453,6 +453,9 @@ class Connection(Context):
kwargs['sock'] = self.open_gateway()
if self.connect_timeout:
kwargs['timeout'] = self.connect_timeout
+ # Strip out empty defaults for less noisy debugging
+ if 'key_filename' in kwargs and not kwargs['key_filename']:
+ del kwargs['key_filename']
# Actually connect!
self.client.connect(**kwargs)
self.transport = self.client.get_transport()
diff --git a/fabric/main.py b/fabric/main.py
index <HASH>..<HASH> 100644
--- a/fabric/main.py
+++ b/fabric/main.py
@@ -32,7 +32,10 @@ class Fab(Program):
),
Argument(
names=('i', 'identity'),
- help="Path to runtime SSH identity (key) file.",
+ kind=list, # Same as OpenSSH, can give >1 key
+ # TODO: automatically add hint about iterable-ness to Invoke
+ # help display machinery?
+ help="Path to runtime SSH identity (key) file. May be given multiple times.", # noqa
),
]
return core_args + my_args
diff --git a/tests/_support/fabfile.py b/tests/_support/fabfile.py
index <HASH>..<HASH> 100644
--- a/tests/_support/fabfile.py
+++ b/tests/_support/fabfile.py
@@ -49,4 +49,12 @@ def expect_mutation(c):
@task
def expect_identity(c):
- assert c.config.connect_kwargs['key_filename'] == 'identity.key'
+ assert c.config.connect_kwargs['key_filename'] == ['identity.key']
+
+
+@task
+def expect_identities(c):
+ assert c.config.connect_kwargs['key_filename'] == [
+ 'identity.key',
+ 'identity2.key',
+ ]
diff --git a/tests/main.py b/tests/main.py
index <HASH>..<HASH> 100644
--- a/tests/main.py
+++ b/tests/main.py
@@ -61,6 +61,7 @@ Available tasks:
build
deploy
expect-from-env
+ expect-identities
expect-identity
expect-mutation
expect-mutation-to-fail
@@ -182,6 +183,8 @@ Available tasks:
with cd(_support):
fab_program.run("fab --identity identity.key expect-identity")
- def may_be_given_multiple_times_building_a_list(self):
- # TODO: when multiple-at-once is implemented in Invoke parser
- skip()
+ def may_be_given_multiple_times(self):
+ with cd(_support):
+ fab_program.run(
+ "fab -i identity.key -i identity2.key expect-identities"
+ )
|
Implement list-type behavior for --identity CLI flag.
Brings this up to parity with v1
|
fabric_fabric
|
train
|
16ca764f190b98ae70b14d486f9ca4b06a278a76
|
diff --git a/lib/link-writer.js b/lib/link-writer.js
index <HASH>..<HASH> 100644
--- a/lib/link-writer.js
+++ b/lib/link-writer.js
@@ -60,8 +60,14 @@ function create (me, lp, link) {
// directory, it's very possible that the thing we're linking to
// doesn't exist yet (especially if it was intended as a symlink),
// so swallow ENOENT errors here and just soldier in.
+ // Additionally, an EPERM or EACCES can happen on win32 if it's trying
+ // to make a link to a directory. Again, just skip it.
+ // A better solution would be to have fs.symlink be supported on
+ // windows in some nice fashion.
if (er) {
- if (er.code === "ENOENT" && process.platform === "win32") {
+ if ((er.code === "ENOENT" ||
+ er.code === "EACCES" ||
+ er.code === "EPERM" ) && process.platform === "win32") {
me.ready = true
me.emit("ready")
me.emit("end")
|
Fix isaacs/npm#<I> Symlinks can fail in many ways on windows.
|
npm_fstream
|
train
|
6cae894bc8ad5666509602573927219f73c64db3
|
diff --git a/js/browser.js b/js/browser.js
index <HASH>..<HASH> 100755
--- a/js/browser.js
+++ b/js/browser.js
@@ -1537,6 +1537,7 @@ class Browser {
// Build locus array (multi-locus view). Use the first track to extract the loci, any track could be used.
const locus = []
const gtexSelections = {}
+ let hasGtexSelections = false;
let anyTrackView = this.trackViews[0]
for (let {referenceFrame} of anyTrackView.viewports) {
const locusString = referenceFrame.getLocusString()
@@ -1547,12 +1548,11 @@ class Browser {
snp: referenceFrame.selection.snp
}
gtexSelections[locusString] = selection
+ hasGtexSelections = true;
}
}
json["locus"] = locus.length === 1 ? locus[0] : locus
-
- const gtexKeys = Object.getOwnPropertyNames(gtexSelections)
- if (gtexKeys.length > 0) {
+ if (hasGtexSelections) {
json["gtexSelections"] = gtexSelections
}
@@ -1576,27 +1576,21 @@ class Browser {
trackJson.push(config)
}
} catch (e) {
- errors.push(e)
+ console.error(`Track: ${track.name}: ${e}`)
+ errors.push(`Track: ${track.name}: ${e}`)
}
}
if (errors.length > 0) {
let n = 1
- let message = 'Errors encountered saving session:'
+ let message = 'Errors encountered saving session: </br>'
for (let e of errors) {
- message += ` (${n++}) ${e.toString()}.`
+ message += ` (${n++}) ${e.toString()} <br/>`
}
throw Error(message)
}
- const locaTrackFiles = trackJson.filter((track) => {
- track.url && FileUtils.isFile(track.url)
- })
-
- if (locaTrackFiles.length > 0) {
- throw new Error(`Error. Sessions cannot include local file references.`)
- }
json["tracks"] = trackJson
diff --git a/js/trackBase.js b/js/trackBase.js
index <HASH>..<HASH> 100644
--- a/js/trackBase.js
+++ b/js/trackBase.js
@@ -161,14 +161,15 @@ class TrackBase {
// Check for non-json-if-yable properties. Perhaps we should test what can be saved.
for (let key of Object.keys(state)) {
- if (typeof state[key] === 'function') {
+ const value = state[key]
+ if (typeof value === 'function') {
throw Error(`Property '${key}' of track '${this.name} is a function. Functions cannot be saved in sessions.`)
}
- if (FileUtils.isFile(state[key])) {
+ if (value instanceof File) { // Test specifically for File. Other types of File-like objects might be savable
const str = `Track ${this.name} is a local file. Sessions cannot be saved with local file references.`
throw Error(str)
}
- if (state[key] instanceof Promise) {
+ if (value instanceof Promise) {
throw Error(`Property '${key}' of track '${this.name} is a Promise. Promises cannot be saved in sessions.`)
}
}
|
Change to session state validation -- test for presence of File objects, not File-like objects.
|
igvteam_igv.js
|
train
|
7c579dd24dc18e6f633f0c2de725a10a39681936
|
diff --git a/lib/createsend/client.rb b/lib/createsend/client.rb
index <HASH>..<HASH> 100644
--- a/lib/createsend/client.rb
+++ b/lib/createsend/client.rb
@@ -134,11 +134,12 @@ module CreateSend
end
# Sets the monthly billing settings for this client.
- def set_monthly_billing(currency, client_pays, markup_percentage)
+ def set_monthly_billing(currency, client_pays, markup_percentage, monthly_scheme = nil)
options = { :body => {
:Currency => currency,
:ClientPays => client_pays,
- :MarkupPercentage => markup_percentage }.to_json }
+ :MarkupPercentage => markup_percentage,
+ :MonthlyScheme => monthly_scheme }.to_json } # monthly_scheme must be nil, Basic or Unlimited
put 'setmonthlybilling', options
end
diff --git a/test/client_test.rb b/test/client_test.rb
index <HASH>..<HASH> 100644
--- a/test/client_test.rb
+++ b/test/client_test.rb
@@ -23,6 +23,7 @@ class ClientTest < Test::Unit::TestCase
cl.BasicDetails.ContactName.should == "Client One (contact)"
cl.AccessDetails.Username.should == "clientone"
cl.AccessDetails.AccessLevel.should == 23
+ cl.BillingDetails.MonthlyScheme.should == "Basic"
end
should "get all campaigns" do
@@ -141,11 +142,32 @@ class ClientTest < Test::Unit::TestCase
@client.set_payg_billing "CAD", true, true, 150
end
- should "set monthly billing" do
+ should "set monthly billing (old)" do
stub_put(@api_key, "clients/#{@client.client_id}/setmonthlybilling.json", nil)
@client.set_monthly_billing "CAD", true, 150
end
+ should "set monthly billing (implicit)" do
+ stub_put(@api_key, "clients/#{@client.client_id}/setmonthlybilling.json", nil)
+ @client.set_monthly_billing "CAD", true, 150
+ request = FakeWeb.last_request.body
+ assert_equal("{\"Currency\":\"CAD\",\"ClientPays\":true,\"MarkupPercentage\":150,\"MonthlyScheme\":null}", request, "Request wasn't as expected")
+ end
+
+ should "set monthly billing (basic)" do
+ stub_put(@api_key, "clients/#{@client.client_id}/setmonthlybilling.json", nil)
+ @client.set_monthly_billing "CAD", true, 150, "Basic"
+ request = FakeWeb.last_request.body
+ assert_equal("{\"Currency\":\"CAD\",\"ClientPays\":true,\"MarkupPercentage\":150,\"MonthlyScheme\":\"Basic\"}", request, "Request wasn't as expected")
+ end
+
+ should "set monthly billing (unlimited)" do
+ stub_put(@api_key, "clients/#{@client.client_id}/setmonthlybilling.json", nil)
+ @client.set_monthly_billing "CAD", false, 120, "Unlimited"
+ request = FakeWeb.last_request.body
+ assert_equal("{\"Currency\":\"CAD\",\"ClientPays\":false,\"MarkupPercentage\":120,\"MonthlyScheme\":\"Unlimited\"}", request, "Request wasn't as expected")
+ end
+
should "delete a client" do
stub_delete(@api_key, "clients/#{@client.client_id}.json", nil)
@client.delete
diff --git a/test/fixtures/client_details.json b/test/fixtures/client_details.json
index <HASH>..<HASH> 100644
--- a/test/fixtures/client_details.json
+++ b/test/fixtures/client_details.json
@@ -21,6 +21,7 @@
"MarkupOnDelivery": 0.0,
"BaseDeliveryRate": 5.0,
"Currency": "USD",
- "BaseDesignSpamTestRate": 5.0
+ "BaseDesignSpamTestRate": 5.0,
+ "MonthlyScheme": "Basic"
}
}
|
CM-<I> - Change API wrappers to support basic / unlimited pricing
* Ruby version done.
|
campaignmonitor_createsend-ruby
|
train
|
22277bfa7828580bd8d02142422f2f8f6674a216
|
diff --git a/src/adafruit_blinka/board/odroidc2.py b/src/adafruit_blinka/board/odroidc2.py
index <HASH>..<HASH> 100644
--- a/src/adafruit_blinka/board/odroidc2.py
+++ b/src/adafruit_blinka/board/odroidc2.py
@@ -56,6 +56,8 @@ SCL = pin.I2C0_SCL
SCLK = pin.SPI0_SCLK
MOSI = pin.SPI0_MOSI
MISO = pin.SPI0_MISO
+SPI_CS0 = pin.GPIO229
+SPI_CS1 = pin.GPIO225
D0 = GPIOX_19
D1 = GPIOX_10
diff --git a/src/busio.py b/src/busio.py
index <HASH>..<HASH> 100755
--- a/src/busio.py
+++ b/src/busio.py
@@ -96,7 +96,7 @@ class SPI(Lockable):
elif detector.board.any_beaglebone:
from adafruit_blinka.microcontroller.am335x.pin import Pin
from adafruit_blinka.microcontroller.generic_linux.spi import SPI as _SPI
- elif board_id == ap_board.ORANGE_PI_PC:
+ elif board_id == ap_board.ORANGE_PI_PC or board_id == ap_board.ORANGE_PI_R1:
from adafruit_blinka.microcontroller.allwinner_h3.pin import Pin
from adafruit_blinka.microcontroller.generic_linux.spi import SPI as _SPI
elif board_id == ap_board.GIANT_BOARD:
@@ -105,6 +105,9 @@ class SPI(Lockable):
elif board_id == ap_board.CORAL_EDGE_TPU_DEV:
from adafruit_blinka.microcontroller.nxp_imx8m.pin import Pin
from adafruit_blinka.microcontroller.generic_linux.spi import SPI as _SPI
+ elif board_id == ap_board.ODROID_C2:
+ from adafruit_blinka.microcontroller.amlogic.s905.pin import Pin
+ from adafruit_blinka.microcontroller.generic_linux.spi import SPI as _SPI
else:
from machine import SPI as _SPI
from machine import Pin
|
SPI fixes for a couple boards
|
adafruit_Adafruit_Blinka
|
train
|
82a2b1a01957ccbfd3a6abe31133f49b9005bced
|
diff --git a/cake/bootstrap.php b/cake/bootstrap.php
index <HASH>..<HASH> 100644
--- a/cake/bootstrap.php
+++ b/cake/bootstrap.php
@@ -48,5 +48,5 @@ error_reporting(E_ALL & ~E_DEPRECATED);
$url = null;
- App::import('Core', array('Dispatcher'));
+ require CAKE . 'dispatcher.php';
?>
\ No newline at end of file
diff --git a/cake/libs/configure.php b/cake/libs/configure.php
index <HASH>..<HASH> 100644
--- a/cake/libs/configure.php
+++ b/cake/libs/configure.php
@@ -652,10 +652,6 @@ class Configure extends Object {
trigger_error(sprintf(__("Can't find application core file. Please create %score.php, and make sure it is readable by PHP.", true), CONFIGS), E_USER_ERROR);
}
- if (!include(CONFIGS . 'bootstrap.php')) {
- trigger_error(sprintf(__("Can't find application bootstrap file. Please create %sbootstrap.php, and make sure it is readable by PHP.", true), CONFIGS), E_USER_ERROR);
- }
-
if (Configure::read('Cache.disable') !== true) {
$cache = Cache::config('default');
@@ -692,6 +688,11 @@ class Configure extends Object {
}
Cache::config('default');
}
+
+ if (!include(CONFIGS . 'bootstrap.php')) {
+ trigger_error(sprintf(__("Can't find application bootstrap file. Please create %sbootstrap.php, and make sure it is readable by PHP.", true), CONFIGS), E_USER_ERROR);
+ }
+
Configure::buildPaths(compact(
'modelPaths', 'viewPaths', 'controllerPaths', 'helperPaths', 'componentPaths',
'behaviorPaths', 'pluginPaths', 'vendorPaths', 'localePaths', 'shellPaths'
|
Changing import of Dispatcher to direct require.
Modifying order of operations in Configure::__loadBootstrap()
moving inclusion of app/config/bootstrap.php after the creation of core cache configs. This allows App::import() to be used in the bootstrap file with cached paths.
|
cakephp_cakephp
|
train
|
634e3eef93c6b33f4649c22be1b03159d49a7c5b
|
diff --git a/squad/frontend/static/squad/filter.js b/squad/frontend/static/squad/filter.js
index <HASH>..<HASH> 100644
--- a/squad/frontend/static/squad/filter.js
+++ b/squad/frontend/static/squad/filter.js
@@ -29,9 +29,9 @@ function FilterController($scope, $attrs, $location) {
$scope.update = function() {
URL[$attrs.param] = $scope.filter
- URL.details = _.map($scope.details_visible, function(v, k) {
+ URL.details = _.sortBy(_.map($scope.details_visible, function(v, k) {
return k.replace('details-', '')
- }).join(',')
+ })).join(',')
$location.search(URL)
}
|
frontend: build: make "details" URL parameter independent of selection order
Depending on which order one expands details in the test results table,
we might get different URLs.
|
Linaro_squad
|
train
|
96a26ba48ef505e3fe096e3db97657bc91a59ed5
|
diff --git a/gwtquery-core/src/main/java/com/google/gwt/query/client/plugins/events/EventsListener.java b/gwtquery-core/src/main/java/com/google/gwt/query/client/plugins/events/EventsListener.java
index <HASH>..<HASH> 100644
--- a/gwtquery-core/src/main/java/com/google/gwt/query/client/plugins/events/EventsListener.java
+++ b/gwtquery-core/src/main/java/com/google/gwt/query/client/plugins/events/EventsListener.java
@@ -342,9 +342,10 @@ public class EventsListener implements EventListener {
}
} else {
LiveBindFunction liveBindFunction = liveBindFunctionByEventType.get(eventbits);
- liveBindFunction.removeBindFunctionForSelector(cssSelector);
+ if (liveBindFunction != null) {
+ liveBindFunction.removeBindFunctionForSelector(cssSelector);
+ }
}
-
}
public void dispatchEvent(Event event) {
@@ -395,13 +396,14 @@ public class EventsListener implements EventListener {
}
public void onBrowserEvent(Event event) {
+ double now = Duration.currentTimeMillis();
// Workaround for Issue_20
if (lastType == event.getTypeInt()
- && lastEvnt - Duration.currentTimeMillis() < 10
+ && now - lastEvnt < 10
&& "body".equalsIgnoreCase(element.getTagName())) {
return;
}
- lastEvnt = Duration.currentTimeMillis();
+ lastEvnt = now;
lastType = event.getTypeInt();
// Execute the original Gwt listener
|
Fixing an old bug in code which was making live methods without context fail the second time. And Fixing a NullPointerException when calling die in an element without live functions
|
ArcBees_gwtquery
|
train
|
4910fa1a1b504dafd1ca003d5f13612fd2fd0767
|
diff --git a/examples/rfic2009/rfic2009style.py b/examples/rfic2009/rfic2009style.py
index <HASH>..<HASH> 100644
--- a/examples/rfic2009/rfic2009style.py
+++ b/examples/rfic2009/rfic2009style.py
@@ -25,7 +25,7 @@ from pyte.reference import Field, Reference, REFERENCE
from pyte.reference import Footnote as PyteFootnote
from pyte.bibliography import Bibliography, BibliographyFormatter
from pyte.flowable import Flowable, FlowableStyle
-from pyte.float import Figure as PyteFigure, CaptionStyle, as_float
+from pyte.float import Figure as PyteFigure, CaptionStyle, Floating
from pyte.table import Tabular as PyteTabular, MIDDLE
from pyte.table import HTMLTabularData, CSVTabularData, TabularStyle, CellStyle
from pyte.draw import LineStyle, RED
@@ -376,7 +376,7 @@ class Figure(CustomElement):
figure = PyteFigure(document, self.get('path'), caption_text,
scale=scale, style=self.style('figure'),
caption_style=self.style('figure caption'))
- return as_float(figure)
+ return Floating(figure)
class Caption(NestedElement):
diff --git a/pyte/float.py b/pyte/float.py
index <HASH>..<HASH> 100644
--- a/pyte/float.py
+++ b/pyte/float.py
@@ -78,6 +78,16 @@ class Figure(Flowable, Referenceable):
return image_height + caption_height
-def as_float(flowable):
- flowable.float = True
- return flowable
+class Decorator(object):
+ def __new__(cls, decoratee, *args, **kwargs):
+ cls = type(cls.__name__ + decoratee.__class__.__name__,
+ (cls, decoratee.__class__), decoratee.__dict__)
+ return object.__new__(cls)
+
+ def __init__(self, decoratee, *args, **kwargs):
+ self._decoratee = decoratee
+
+
+class Floating(Decorator):
+ def flow(self, container):
+ super().flow(container._float_space)
diff --git a/pyte/flowable.py b/pyte/flowable.py
index <HASH>..<HASH> 100644
--- a/pyte/flowable.py
+++ b/pyte/flowable.py
@@ -14,9 +14,9 @@ class FlowableStyle(Style):
class Flowable(Styled):
style_class = FlowableStyle
- def __init__(self, style=None, float=False):
+ def __init__(self, style=None):
super().__init__(style)
- self.float = float
+ self.resume = False
@property
def page(self):
@@ -36,6 +36,13 @@ class Flowable(Styled):
def split(self):
yield self
+ def flow(self, container):
+ self.container = container
+ if not self.resume:
+ container.advance(float(self.get_style('space_above')))
+ self.render(container.canvas)
+ return container.advance(float(self.get_style('space_below')))
+
def render(self, canvas, offset=0):
raise NotImplementedError("virtual method not implemented in class %s" %
self.__class__.__name__)
diff --git a/pyte/layout.py b/pyte/layout.py
index <HASH>..<HASH> 100644
--- a/pyte/layout.py
+++ b/pyte/layout.py
@@ -109,26 +109,12 @@ class ContainerBase(RenderTarget):
coordinate system."""
return float(self.canvas.height) - self._flowable_offset
- def flow(self, flowable, continued=False, in_float_space=False):
+ def flow(self, flowable):
"""Flow `flowable` into this container and return the vertical space
- taken up by the flowable.
-
- `continued` indicates whether the flowable was already partially
- rendered (to a previous in this container's chain).
-
- If `flowable` is to be rendered as a float (`flowable.float` is `True`),
- it is forwarded to the float space associated with this container."""
- if flowable.float and not in_float_space:
- self._float_space.flow(flowable, in_float_space=True)
- return 0
- else:
- start_offset = self._flowable_offset
- flowable.container = self
- if not continued:
- self.advance(float(flowable.get_style('space_above')))
- flowable.render(self.canvas)
- self.advance(float(flowable.get_style('space_below')))
- return self._flowable_offset - start_offset
+ taken up by the flowable."""
+ start_offset = self._flowable_offset
+ flowable.flow(self)
+ return self._flowable_offset - start_offset
def render(self, canvas):
end_of_page = None
@@ -277,21 +263,17 @@ class Chain(RenderTarget):
return self._document
def render(self):
- continued = False
while self._container_index < len(self._containers):
container = self._containers[self._container_index]
self._container_index += 1
try:
while self._flowable_index < len(self.flowables):
flowable = self.flowables[self._flowable_index]
- container.flow(flowable, continued)
+ container.flow(flowable)
self._flowable_index += 1
- continued = False
except EndOfContainer:
- continued = True
if self._container_index > len(self._containers) - 1:
raise EndOfPage(self)
def add_container(self, container):
- assert isinstance(container, Container)
self._containers.append(container)
|
implement floats as a decorator of Flowable
* introduce Flowable.flow()
* the decorator overrides this method and passes the target container's float space
|
brechtm_rinohtype
|
train
|
0e14741afd6f62aef17e3707885e61e733ca2a74
|
diff --git a/pkg/storageops/gce/gce.go b/pkg/storageops/gce/gce.go
index <HASH>..<HASH> 100644
--- a/pkg/storageops/gce/gce.go
+++ b/pkg/storageops/gce/gce.go
@@ -272,7 +272,8 @@ func (s *gceOps) Enumerate(
ctx := context.Background()
found := false
- req := s.service.Disks.List(s.inst.Project, s.inst.Zone)
+ filter := generateListFilterFromLabels(labels)
+ req := s.service.Disks.List(s.inst.Project, s.inst.Zone).Filter(filter)
if err := req.Pages(ctx, func(page *compute.DiskList) error {
for _, disk := range page.Items {
if len(setIdentifier) == 0 {
@@ -597,3 +598,14 @@ func (s *gceOps) waitForAttach(
return devicePath.(string), nil
}
+
+// generateListFilterFromLabels create a filter string based off --filter documentation at
+// https://cloud.google.com/sdk/gcloud/reference/compute/disks/list
+func generateListFilterFromLabels(labels map[string]string) string {
+ var filter string
+ for k, v := range labels {
+ filter = fmt.Sprintf("%s(labels.%s eq %s)", filter, k, v)
+ }
+
+ return filter
+}
diff --git a/pkg/storageops/test/storageops.go b/pkg/storageops/test/storageops.go
index <HASH>..<HASH> 100644
--- a/pkg/storageops/test/storageops.go
+++ b/pkg/storageops/test/storageops.go
@@ -2,13 +2,20 @@ package test
import (
"fmt"
+ "strings"
"testing"
"time"
"github.com/libopenstorage/openstorage/pkg/storageops"
+ uuid "github.com/satori/go.uuid"
"github.com/stretchr/testify/require"
)
+var diskLabels = map[string]string{
+ "source": "openstorage-test",
+ "foo": "bar",
+}
+
func RunTest(drivers map[string]storageops.Ops,
diskTemplates map[string]map[string]interface{},
t *testing.T) {
@@ -63,36 +70,43 @@ func snapshot(t *testing.T, driver storageops.Ops, diskName string) {
}
func tags(t *testing.T, driver storageops.Ops, diskName string) {
- labels := map[string]string{
- "source": "openstorage-test",
- "foo": "bar",
- }
-
- err := driver.ApplyTags(diskName, labels)
+ err := driver.ApplyTags(diskName, diskLabels)
require.NoError(t, err, "failed to apply tags to disk")
tags, err := driver.Tags(diskName)
require.NoError(t, err, "failed to get tags for disk")
require.Len(t, tags, 2, "invalid number of labels found on disk")
- labelsToRemove := map[string]string{"foo": "bar"}
- err = driver.RemoveTags(diskName, labelsToRemove)
+ err = driver.RemoveTags(diskName, diskLabels)
require.NoError(t, err, "failed to remove tags from disk")
tags, err = driver.Tags(diskName)
require.NoError(t, err, "failed to get tags for disk")
- require.Len(t, tags, 1, "invalid number of labels found on disk")
+ require.Len(t, tags, 0, "invalid number of labels found on disk")
+
+ err = driver.ApplyTags(diskName, diskLabels)
+ require.NoError(t, err, "failed to apply tags to disk")
}
func enumerate(t *testing.T, driver storageops.Ops, diskName string) {
- disks, err := driver.Enumerate([]*string{&diskName}, nil, storageops.SetIdentifierNone)
- require.NoError(t, err, "failed to create disk")
- require.Len(t, disks, 1, "inspect returned invalid length")
+ disks, err := driver.Enumerate([]*string{&diskName}, diskLabels, storageops.SetIdentifierNone)
+ require.NoError(t, err, "failed to enumerate disk")
+ require.Len(t, disks, 1, "enumerate returned invalid length")
+
+ // enumerate with invalid labels
+ randomStr := uuid.NewV4().String()
+ randomStr = strings.Replace(randomStr, "-", "", -1)
+ invalidLabels := map[string]string{
+ fmt.Sprintf("key%s", randomStr): fmt.Sprintf("val%s", randomStr),
+ }
+ disks, err = driver.Enumerate([]*string{&diskName}, invalidLabels, storageops.SetIdentifierNone)
+ require.NoError(t, err, "failed to enumerate disk")
+ require.Len(t, disks, 0, "enumerate returned invalid length")
}
func inspect(t *testing.T, driver storageops.Ops, diskName string) {
disks, err := driver.Inspect([]*string{&diskName})
- require.NoError(t, err, "failed to create disk")
+ require.NoError(t, err, "failed to inspect disk")
require.Len(t, disks, 1, fmt.Sprintf("inspect returned invalid length: %d", len(disks)))
}
|
fix label based filtering for GCE enumerate
|
libopenstorage_openstorage
|
train
|
9bdd49c81e0d607b9e5d5c4a0ef90aa32fa80ab7
|
diff --git a/lib/http/public/javascripts/main.js b/lib/http/public/javascripts/main.js
index <HASH>..<HASH> 100644
--- a/lib/http/public/javascripts/main.js
+++ b/lib/http/public/javascripts/main.js
@@ -235,7 +235,7 @@ function refreshJobs(state, fn) {
*/
function pollStats(ms) {
- request('/stats', function(data){
+ request('./stats', function(data){
o('li.inactive .count').text(data.inactiveCount);
o('li.active .count').text(data.activeCount);
o('li.complete .count').text(data.completeCount);
|
Use relative stats route, otherwise kue app mounted at a subdirectory cannot request stats.
|
Automattic_kue
|
train
|
db0846a716693f1f72f6f3ad36bcf9b5622f7269
|
diff --git a/eventsourcing/infrastructure/datastore/cassandraengine.py b/eventsourcing/infrastructure/datastore/cassandraengine.py
index <HASH>..<HASH> 100644
--- a/eventsourcing/infrastructure/datastore/cassandraengine.py
+++ b/eventsourcing/infrastructure/datastore/cassandraengine.py
@@ -12,7 +12,7 @@ from eventsourcing.infrastructure.datastore.base import DatastoreSettings, Datas
class CassandraSettings(DatastoreSettings):
CASSANDRA_HOSTS = [h.strip() for h in os.getenv('CASSANDRA_HOSTS', 'localhost').split(',')]
CASSANDRA_PORT = int(os.getenv('CASSANDRA_PORT', 9042))
- CASSANDRA_PROTOCOL_VERSION = int(os.getenv('CASSANDRA_PROTOCOL_VERSION', 4))
+ CASSANDRA_PROTOCOL_VERSION = int(os.getenv('CASSANDRA_PROTOCOL_VERSION', 2))
CASSANDRA_DEFAULT_KEYSPACE = os.getenv('CASSANDRA_KEYSPACE', 'eventsourcing')
CASSANDRA_CONSISTENCY_LEVEL = os.getenv('CASSANDRA_CONSISTENCY_LEVEL', 'LOCAL_QUORUM')
CASSANDRA_REPLICATION_FACTOR = os.getenv('CASSANDRA_REPLICATION_FACTOR', 1)
|
Changed default protocol version to 2, since we only use LWT.
|
johnbywater_eventsourcing
|
train
|
b59138261ce27ed7b8dd09cc71c4803c12731d3e
|
diff --git a/djgunicorn/management/commands/gunserver.py b/djgunicorn/management/commands/gunserver.py
index <HASH>..<HASH> 100644
--- a/djgunicorn/management/commands/gunserver.py
+++ b/djgunicorn/management/commands/gunserver.py
@@ -1,25 +1,26 @@
from __future__ import print_function
import datetime
+import importlib
import sys
-from django.apps import apps
+from django.core.management import get_commands
from django.utils import six
from django.utils.encoding import get_system_encoding
from djgunicorn.gunicorn import run
-# Use staticfiles's command by default, but fall back to default if it is
-# not installed.
-if apps.is_installed('django.contrib.staticfiles'):
- from django.contrib.staticfiles.management.commands.runserver import (
- Command as BaseCommand,
- )
-else:
- from django.core.management.commands.runserver import (
- Command as BaseCommand,
- )
+# Use the active runserver command as base. This is generally provided by
+# staticfiles, but can be django.core if it's not installed, or even something
+# else if some third-party app overrides it.
+def get_command_class(name):
+ module = importlib.import_module('{app}.management.commands.{name}'.format(
+ app=get_commands()[name], name=name,
+ ))
+ return module.Command
+
+BaseCommand = get_command_class('runserver')
class Command(BaseCommand):
|
Use a cool way to lookup runserver dynamically
|
uranusjr_django-gunicorn
|
train
|
08cfb0d66dcf40d71d0be29372a019066e9ffcba
|
diff --git a/lib/bunyan.rb b/lib/bunyan.rb
index <HASH>..<HASH> 100644
--- a/lib/bunyan.rb
+++ b/lib/bunyan.rb
@@ -46,7 +46,7 @@ module Bunyan
alias_method :disabled=, :disabled
def disabled?
- disabled
+ !!disabled
end
def method_missing(method, *args, &block)
diff --git a/spec/bunyan_spec.rb b/spec/bunyan_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/bunyan_spec.rb
+++ b/spec/bunyan_spec.rb
@@ -98,6 +98,16 @@ describe 'bunyan logger configuration' do
end
end
+describe Bunyan::Logger, "#disabled?" do
+ it "should return false if nothing is set" do
+ Bunyan::Logger.configure do |config|
+ config.database 'my_database'
+ config.collection 'my_collection'
+ end
+ Bunyan::Logger.disabled?.should == false
+ end
+end
+
describe 'the database getter' do
it 'should allow setting of the database' do
Bunyan::Logger.configure do |config|
|
added #disabled? method and specs
|
ajsharp_bunyan
|
train
|
2aac5ef7d405e0606fdc5b976a6b55c0696c92b2
|
diff --git a/packages/victory-core/src/victory-util/label-helpers.js b/packages/victory-core/src/victory-util/label-helpers.js
index <HASH>..<HASH> 100644
--- a/packages/victory-core/src/victory-util/label-helpers.js
+++ b/packages/victory-core/src/victory-util/label-helpers.js
@@ -50,14 +50,30 @@ function getPadding(props, datum) {
}
function getOffset(props, datum) {
- const { polar } = props;
- const padding = polar ? getPolarPadding(props, datum) : getPadding(props, datum);
+ if (props.polar) {
+ return {};
+ }
+ const padding = getPadding(props, datum);
return {
dx: padding.x,
dy: padding.y
};
}
+function getPosition(props, datum) {
+ const { polar } = props;
+ const { x, y } = Helpers.scalePoint(props, datum);
+ if (!polar) {
+ return { x, y };
+ } else {
+ const polarPadding = getPolarPadding(props, datum);
+ return {
+ x: x + polarPadding.x,
+ y: y + polarPadding.y
+ };
+ }
+}
+
function getPolarPadding(props, datum) {
const { style } = props;
const degrees = getDegrees(props, datum);
@@ -158,7 +174,7 @@ function getProps(props, index) {
const angle = getAngle(props, datum);
const text = getText(props, datum, index);
const labelPlacement = getLabelPlacement(props);
- const { x, y } = Helpers.scalePoint(props, datum);
+ const { x, y } = getPosition(props, datum);
const { dx, dy } = getOffset(props, datum);
return {
angle,
|
revert dx dy change for polar charts
|
FormidableLabs_victory
|
train
|
b1d1cc489de11e4ddb279fbfee37d7b6ae72728b
|
diff --git a/src/DI/ConsoleExtension.php b/src/DI/ConsoleExtension.php
index <HASH>..<HASH> 100644
--- a/src/DI/ConsoleExtension.php
+++ b/src/DI/ConsoleExtension.php
@@ -46,7 +46,7 @@ class ConsoleExtension extends CompilerExtension
return Expect::structure([
'url' => Expect::string(),
'name' => Expect::string(),
- 'version' => Expect::string(),
+ 'version' => Expect::anyOf(Expect::string(), Expect::int(), Expect::float()),
'catchExceptions' => Expect::bool(),
'autoExit' => Expect::bool(),
'helperSet' => Expect::string(),
@@ -76,7 +76,7 @@ class ConsoleExtension extends CompilerExtension
}
if ($config->version !== null) {
- $applicationDef->addSetup('setVersion', [$config->version]);
+ $applicationDef->addSetup('setVersion', [(string) $config->version]);
}
if ($config->catchExceptions !== null) {
|
ConsoleExtension: allow console version to be numeric
|
contributte_console
|
train
|
2c950bc47b9a42c4372370746d748dbe65655df0
|
diff --git a/lib/rubocop/ast_node.rb b/lib/rubocop/ast_node.rb
index <HASH>..<HASH> 100644
--- a/lib/rubocop/ast_node.rb
+++ b/lib/rubocop/ast_node.rb
@@ -28,8 +28,9 @@ module Astrolabe
extend RuboCop::NodePattern::Macros
# define both Node.method_name(node), and also node.method_name
- def def_matcher(method_name, pattern_str)
- singleton_class.def_node_matcher method_name, pattern_str
+ def def_matcher(method_name, pattern)
+ filename, lineno = *caller.first.split(':')
+ singleton_class.def_node_matcher(method_name, pattern, filename, lineno)
class_eval("def #{method_name}; Node.#{method_name}(self); end")
end
end
diff --git a/lib/rubocop/node_pattern.rb b/lib/rubocop/node_pattern.rb
index <HASH>..<HASH> 100644
--- a/lib/rubocop/node_pattern.rb
+++ b/lib/rubocop/node_pattern.rb
@@ -420,11 +420,16 @@ module RuboCop
# yield to the block (passing any captures as block arguments).
# If the node matches, and no block is provided, the new method will
# return the captures, or `true` if there were none.
- def def_node_matcher(method_name, pattern_str)
+ def def_node_matcher(method_name, pattern_str, file = nil, lineno = nil)
compiler = RuboCop::NodePattern::Compiler.new(pattern_str, 'node')
- src = "def #{method_name}(node" << compiler.emit_trailing_params <<
- ');' << compiler.emit_method_code << ';end'
- class_eval(src)
+ src = "def #{method_name}(node" <<
+ compiler.emit_trailing_params <<
+ ');' <<
+ compiler.emit_method_code <<
+ ';end'
+
+ file, lineno = *caller.first.split(':') unless file && lineno
+ class_eval(src, file, lineno.to_i)
end
# Define a method which recurses over the descendants of an AST node,
@@ -445,8 +450,11 @@ module RuboCop
prelude = "return enum_for(:#{method_name}, node0" \
"#{compiler.emit_trailing_params}) unless block_given?"
end
- class_eval(node_search_body(method_name, compiler.emit_trailing_params,
- prelude, compiler.match_code, on_match))
+
+ src = node_search_body(method_name, compiler.emit_trailing_params,
+ prelude, compiler.match_code, on_match)
+ filename, lineno = *caller.first.split(':')
+ class_eval(src, filename, lineno.to_i)
end
def node_search_body(method_name, trailing_params, prelude, match_code,
|
NodePattern::Macros#def_node_matcher/#def_node_search track source location
This is useful in tools like Pry.
|
rubocop-hq_rubocop
|
train
|
ba4249c4f05bab2798e52fafb05d243f0fe8868b
|
diff --git a/phy/cluster/supervisor.py b/phy/cluster/supervisor.py
index <HASH>..<HASH> 100644
--- a/phy/cluster/supervisor.py
+++ b/phy/cluster/supervisor.py
@@ -262,8 +262,12 @@ class ClusterView(Table):
color: #86D16D;
}
- table tr[data-group='mua'], table tr[data-group='noise'] {
- color: #888;
+ table tr[data-group='mua'] {
+ color: #afafaf;
+ }
+
+ table tr[data-group='noise'] {
+ color: #777;
}
''')
|
Different colors for MUA and noise groups in cluster view
|
kwikteam_phy
|
train
|
5f32374e4d4d632169ad43cc28c42f4ac5170128
|
diff --git a/lib/reddit_bot.rb b/lib/reddit_bot.rb
index <HASH>..<HASH> 100644
--- a/lib/reddit_bot.rb
+++ b/lib/reddit_bot.rb
@@ -98,6 +98,36 @@ module RedditBot
end
end
+ # :yields: JSON objects: ["data"] part of post or self.post
+ def new_posts caching = false
+ cache = lambda do |id, &block|
+ next block.call unless caching
+ require "fileutils"
+ FileUtils.mkdir_p "cache"
+ filename = "cache/#{Digest::MD5.hexdigest id.inspect}"
+ next YAML.load File.read filename if File.exist? filename
+ block.call.tap do |data|
+ File.write filename, YAML.dump(data)
+ end
+ end
+ Enumerator.new do |e|
+ after = {}
+ loop do
+ args = [:get, "/r/#{@subreddit}/new", {limit: 100}.merge(after)]
+ result = cache.call(args){ json *args }
+ fail if result.keys != %w{ kind data }
+ fail if result["kind"] != "Listing"
+ fail if result["data"].keys != %w{ modhash children after before }
+ result["data"]["children"].each do |post|
+ fail "unknown type post['kind']: #{post["kind"]}" unless post["kind"] == "t3"
+ e << post["data"]
+ end
+ break unless marker = result["data"]["after"]
+ after = {after: marker}
+ end
+ end
+ end
+
# :yields: JSON objects: ["data"] part of post or self.post, top level comment (["children"] element)
def each_new_post_with_top_level_comments
json(:get, "/r/#{@subreddit}/new")["data"]["children"].each do |post|
|
new_posts methods for someone's unfinished ..( bot
|
Nakilon_reddit_bot
|
train
|
52c71f7b703cc4af85e825f741d4a55492d37803
|
diff --git a/lib/rocket_job/plugins/document.rb b/lib/rocket_job/plugins/document.rb
index <HASH>..<HASH> 100644
--- a/lib/rocket_job/plugins/document.rb
+++ b/lib/rocket_job/plugins/document.rb
@@ -15,10 +15,14 @@ module RocketJob
include RocketJob::Plugins::Document::Static
included do
- # Prevent data in MongoDB from re-defining the model behavior
+ # Prevent data in MongoDB from re-defining the model behavior.
self.static_keys = true
- # Turn off embedded callbacks. Slow and not used for Jobs
+ # Only save changes to this instance to prevent losing
+ # changes made by other processes or threads.
+ self.partial_updates = true
+
+ # Turn off embedded callbacks. Slow and not used by Jobs.
embedded_callbacks_off
end
|
Only save changes to prevent overwriting changes made by other processes.
|
rocketjob_rocketjob
|
train
|
b6b9f8cc0371eafa0e2f49a3abb6317d9fe31f10
|
diff --git a/README.md b/README.md
index <HASH>..<HASH> 100644
--- a/README.md
+++ b/README.md
@@ -12,5 +12,4 @@ A Go interface to [ZeroMQ](http://www.zeromq.org/) version 3.
* `zmq_socket_monitor()`
* `ZMQ_TCP_ACCEPT_FILTER`
- * `ZMQ_FD` on Windows
* Re-implementing the remaining examples for [ØMQ - The Guide](http://zguide.zeromq.org/page:all).
diff --git a/socketget_unix.go b/socketget_unix.go
index <HASH>..<HASH> 100644
--- a/socketget_unix.go
+++ b/socketget_unix.go
@@ -7,11 +7,9 @@ package zmq3
*/
import "C"
-
// ZMQ_FD: Retrieve file descriptor associated with the socket
//
// See: http://api.zeromq.org/3-2:zmq-getsockopt#toc23
func (soc *Socket) GetFd() (int, error) {
return soc.getInt(C.ZMQ_FD)
}
-
diff --git a/socketget_windows.go b/socketget_windows.go
index <HASH>..<HASH> 100644
--- a/socketget_windows.go
+++ b/socketget_windows.go
@@ -7,23 +7,20 @@ package zmq3
*/
import "C"
+import (
+ "unsafe"
+)
+
/*
ZMQ_FD: Retrieve file descriptor associated with the socket
See: http://api.zeromq.org/3-2:zmq-getsockopt#toc23
*/
-
-
-/*
func (soc *Socket) GetFd() (uintptr, error) {
- if !soc.opened {
- return uintptr(0), errSocClosed
- }
value := C.SOCKET(0)
size := C.size_t(unsafe.Sizeof(value))
- if i, err := C.zmq_getsockopt(soc.soc, C.ZMQ_FD, &value, &size); i != 0 {
+ if i, err := C.zmq_getsockopt(soc.soc, C.ZMQ_FD, unsafe.Pointer(&value), &size); i != 0 {
return uintptr(0), errget(err)
}
return uintptr(value), nil
}
-*/
|
ZMQ_FD on Windows (not tested)
|
pebbe_zmq3
|
train
|
99a9c213272b9a3793dbc2f89d72b57f940ba67b
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -4,7 +4,7 @@ from setuptools import setup, find_packages
INSTALL_REQUIRES = ("marshmallow>=2.15.2", "SQLAlchemy>=1.2.0")
EXTRAS_REQUIRE = {
- "tests": ["pytest", "pytest-lazy-fixture", "mock"],
+ "tests": ["pytest", "pytest-lazy-fixture"],
"lint": ["flake8==3.7.9", "flake8-bugbear==20.1.4", "pre-commit~=2.0"],
"docs": ["sphinx==3.0.2", "alabaster==0.7.12", "sphinx-issues==1.2.0"],
}
|
Remove unused mock dependency (#<I>)
|
marshmallow-code_marshmallow-sqlalchemy
|
train
|
97411e81b1ce12f33a08cf75e212ba5e0358530f
|
diff --git a/Package/Loader/LazyAssetPackageLoader.php b/Package/Loader/LazyAssetPackageLoader.php
index <HASH>..<HASH> 100644
--- a/Package/Loader/LazyAssetPackageLoader.php
+++ b/Package/Loader/LazyAssetPackageLoader.php
@@ -174,6 +174,10 @@ class LazyAssetPackageLoader implements LazyLoaderInterface
if (false === $data) {
$this->driver->cleanup();
+ if (!$this->verbose) {
+ $this->io->overwrite('', false);
+ }
+
return false;
}
|
Fix output without verbose option when package file is not found
|
fxpio_composer-asset-plugin
|
train
|
e42f240a1f883befb01ebce1b394950eeae7db98
|
diff --git a/commands/pull.py b/commands/pull.py
index <HASH>..<HASH> 100644
--- a/commands/pull.py
+++ b/commands/pull.py
@@ -19,16 +19,13 @@ from helpers.command import Command
from helpers.misc import do_pull
-@Command('pull', ['handler', 'is_admin', 'nick', 'botnick'])
+@Command('pull', ['handler', 'nick', 'botnick'], admin=True)
def cmd(send, _, args):
"""Pull changes.
Syntax: !pull <branch>
"""
- if not args['is_admin'](args['nick']):
- send("Nope, not gonna do it.")
- else:
- try:
- send(do_pull(args['handler'].srcdir, args['botnick']))
- except subprocess.CalledProcessError as e:
- for line in e.output.decode().splitlines():
- send(line)
+ try:
+ send(do_pull(args['handler'].srcdir, args['botnick']))
+ except subprocess.CalledProcessError as e:
+ for line in e.output.decode().splitlines():
+ send(line)
|
Convert pull to use the admin decorator arg
|
tjcsl_cslbot
|
train
|
813ee068b16bf5e4a699f1c7433cd5a0cc9046c4
|
diff --git a/km3pipe/io/pandas.py b/km3pipe/io/pandas.py
index <HASH>..<HASH> 100644
--- a/km3pipe/io/pandas.py
+++ b/km3pipe/io/pandas.py
@@ -75,7 +75,7 @@ class H5Chain(object):
def __enter__(self):
return self
- def __call__(self, n_evts=None, keys=None):
+ def __call__(self, n_evts=None, keys=None, ignore_events=False):
"""
Parameters
----------
@@ -94,14 +94,18 @@ class H5Chain(object):
n = n_evts
if isinstance(n_evts, dict):
n = n_evts[fname]
- max_id = np.unique(h5.root.event_info.read(field='event_id', stop=n))[-1]
- print(max_id)
+ if ignore_events:
+ max_id = n
+ else:
+ max_id = np.unique(
+ h5.root.event_info.read(field='event_id', stop=n)
+ )[-1]
# tables under '/', e.g. mc_tracks
for tab in h5.iter_nodes('/', classname='Table'):
tabname = tab.name
if keys is not None and tabname not in keys:
continue
- arr = _read_table(tab, max_id)
+ arr = _read_table(tab, max_id, ignore_events)
arr = pd.DataFrame.from_records(arr)
store[tabname].append(arr)
@@ -123,9 +127,11 @@ def map2df(map):
return pd.DataFrame.from_records(map, index=np.ones(1, dtype=int))
-def _read_table(tab, max_id=None):
- # takewhile(lambda x: x['event_id'] != max_id, tab.iterrows())
- return tab.read_where('event_id <= %d' % max_id)
+def _read_table(tab, max_id=None, ignore_events=False):
+ if ignore_events:
+ return tab[:max_id]
+ else:
+ return tab.read_where('event_id <= %d' % max_id)
def read_group(group, max_id=None, **kwargs):
|
add ignore_events for stupid non-km3-style tables
|
tamasgal_km3pipe
|
train
|
2b5f01828dbc4a9c0b7078e8e004179b500b63e3
|
diff --git a/src/acdhOeaw/schema/dissemination/CiriloService.php b/src/acdhOeaw/schema/dissemination/CiriloService.php
index <HASH>..<HASH> 100644
--- a/src/acdhOeaw/schema/dissemination/CiriloService.php
+++ b/src/acdhOeaw/schema/dissemination/CiriloService.php
@@ -28,11 +28,10 @@
* @license https://opensource.org/licenses/MIT
*/
-namespace acdhOeaw\schema\cirilo;
+namespace acdhOeaw\schema\dissemination;
use SimpleXMLElement;
use Exception;
-use EasyRdf\Resource;
use acdhOeaw\fedora\Fedora;
use acdhOeaw\schema\dissemination\Service as DissService;
use acdhOeaw\util\RepoConfig as RC;
@@ -93,19 +92,19 @@ class CiriloService extends Service {
$supports[] = (string) $i;
}
- $service = new DissService($fedora, $id, $location, $retMime, $supports);
+ $obj = new DissService($fedora, $id, $location, $retMime, $supports);
foreach ($service->xpath('./fmm:DatastreamInputParm') as $i) {
- self::parseParameter($service, $i);
+ self::parseParameter($obj, $i);
}
foreach ($service->xpath('./fmm:UserInputParm') as $i) {
- self::parseParameter($service, $i);
+ self::parseParameter($obj, $i);
}
foreach ($service->xpath('./fmm:DefaultInputParm') as $i) {
- self::parseParameter($service, $i);
+ self::parseParameter($obj, $i);
}
- return $service;
+ return $obj;
}
/**
|
Small fixes in CiriloServices
|
acdh-oeaw_repo-php-util
|
train
|
fc03b1f38c3d8df5f67030b248cae7107f573784
|
diff --git a/src/Controller/CrudController.php b/src/Controller/CrudController.php
index <HASH>..<HASH> 100644
--- a/src/Controller/CrudController.php
+++ b/src/Controller/CrudController.php
@@ -57,6 +57,7 @@ abstract class CrudController extends Controller
$this->configureListCriteria($request, $queryBuilder);
$pager = new Pagerfanta(new DoctrineORMAdapter($queryBuilder));
+ $pager->setMaxPerPage($request->get('resultsPerPage', $configuration->getResultsPerPage()));
$pager->setCurrentPage($request->get('page', 1));
/** @var Grid $grid */
diff --git a/src/Model/Configuration.php b/src/Model/Configuration.php
index <HASH>..<HASH> 100644
--- a/src/Model/Configuration.php
+++ b/src/Model/Configuration.php
@@ -82,6 +82,11 @@ class Configuration
private $templateVariables = [];
/**
+ * @var int
+ */
+ private $resultsPerPage = 10;
+
+ /**
* Constructor
*
* @param CrudController $controller
@@ -401,6 +406,25 @@ class Configuration
}
/**
+ * @return int
+ */
+ public function getResultsPerPage()
+ {
+ return $this->resultsPerPage;
+ }
+
+ /**
+ * @param int $resultsPerPage
+ *
+ * @return self
+ */
+ public function setResultsPerPage($resultsPerPage)
+ {
+ $this->resultsPerPage = $resultsPerPage;
+ return $this;
+ }
+
+ /**
* Validate the configuration
*
* @return void
diff --git a/src/Resources/doc/configuration.md b/src/Resources/doc/configuration.md
index <HASH>..<HASH> 100644
--- a/src/Resources/doc/configuration.md
+++ b/src/Resources/doc/configuration.md
@@ -82,3 +82,8 @@ the from and grid itself. The default translation domain is `"messages"`.
### `setTemplateVariables(array $vars)`
Extra variables that you want to pass to the Twig template.
+
+
+### `setResultsPerPage(int $resultsPerPage)`
+
+The number of results per page displayed by the grid. Defaults to 10.
\ No newline at end of file
|
Added possibility to configure the results per page
|
Prezent_prezent-crud-bundle
|
train
|
e1fbe11c973244b2722c32cbc97661709c48b576
|
diff --git a/gossipsub.go b/gossipsub.go
index <HASH>..<HASH> 100644
--- a/gossipsub.go
+++ b/gossipsub.go
@@ -199,27 +199,16 @@ func (gs *GossipSubRouter) Publish(from peer.ID, msg *pb.Message) {
// gossipsub peers
gmap, ok := gs.mesh[topic]
- if ok {
- // direct peers in the mesh for topic
- for p := range gmap {
- tosend[p] = struct{}{}
- }
- } else {
- // fanout peers, we are not in the mesh for topic
+ if !ok {
+ // we are not in the mesh for topic, use fanout peers
gmap, ok = gs.fanout[topic]
if !ok {
- // we don't have any yet, pick some
- var peers []peer.ID
- for p := range tmap {
- if gs.peers[p] == GossipSubID {
- peers = append(peers, p)
- }
- }
+ // we don't have any, pick some
+ peers := gs.getPeers(topic, func(peer.ID) bool { return true })
if len(peers) > 0 {
gmap = make(map[peer.ID]struct{})
- shufflePeers(peers)
for _, p := range peers[:GossipSubD] {
gmap[p] = struct{}{}
}
@@ -254,6 +243,24 @@ func (gs *GossipSubRouter) Publish(from peer.ID, msg *pb.Message) {
}
}
+func (gs *GossipSubRouter) getPeers(topic string, filter func(peer.ID) bool) []peer.ID {
+ tmap, ok := gs.p.topics[topic]
+ if !ok {
+ return nil
+ }
+
+ peers := make([]peer.ID, 0, len(tmap))
+ for p := range tmap {
+ if gs.peers[p] == GossipSubID && filter(p) {
+ peers = append(peers, p)
+ }
+ }
+
+ shufflePeers(peers)
+
+ return peers
+}
+
func (gs *GossipSubRouter) Join(topic string) {
// TODO
}
|
refactor Publish to use getPeers
|
libp2p_go-libp2p-pubsub
|
train
|
ad5ddaf55a71ebf640043d34c79cf3672c86be5c
|
diff --git a/src/_pytest/python_api.py b/src/_pytest/python_api.py
index <HASH>..<HASH> 100644
--- a/src/_pytest/python_api.py
+++ b/src/_pytest/python_api.py
@@ -531,17 +531,11 @@ def _is_numpy_array(obj):
Return true if the given object is a numpy array. Make a special effort to
avoid importing numpy unless it's really necessary.
"""
- import inspect
-
- for cls in inspect.getmro(type(obj)):
- if cls.__module__ == "numpy":
- try:
- import numpy as np
-
- return isinstance(obj, np.ndarray)
- except ImportError:
- pass
+ import sys
+ np = sys.modules.get("numpy")
+ if np is not None:
+ return isinstance(obj, np.ndarray)
return False
|
Simplify is_numpy_array as suggested in review
|
pytest-dev_pytest
|
train
|
3da0e3167796228719626b544333a2b5c63e9c11
|
diff --git a/hibernate-ogm-core/src/main/java/org/hibernate/ogm/grid/RowKey.java b/hibernate-ogm-core/src/main/java/org/hibernate/ogm/grid/RowKey.java
index <HASH>..<HASH> 100644
--- a/hibernate-ogm-core/src/main/java/org/hibernate/ogm/grid/RowKey.java
+++ b/hibernate-ogm-core/src/main/java/org/hibernate/ogm/grid/RowKey.java
@@ -31,15 +31,15 @@ import java.util.Arrays;
public final class RowKey implements Serializable {
private final String table;
- private final String[] columns;
+ private final String[] columnNames;
//column value types do have to be serializable so RowKey can be serializable
//should it be a Serializable[] type? It seems to be more pain than anything else
private final Object[] columnValues;
private final int hashCode;
- public RowKey(String table, String[] columns, Object[] columnValues) {
+ public RowKey(String table, String[] columnNames, Object[] columnValues) {
this.table = table;
- this.columns = columns;
+ this.columnNames = columnNames;
this.columnValues = columnValues;
this.hashCode = generateHashCode();
}
@@ -63,7 +63,7 @@ public final class RowKey implements Serializable {
if ( !Arrays.equals( columnValues, that.columnValues ) ) {
return false;
}
- if ( !Arrays.equals( columns, that.columns ) ) {
+ if ( !Arrays.equals( columnNames, that.columnNames ) ) {
return false;
}
@@ -85,7 +85,7 @@ public final class RowKey implements Serializable {
final StringBuilder sb = new StringBuilder();
sb.append( "RowKey" );
sb.append( "{table='" ).append( table ).append( '\'' );
- sb.append( ", columns=" ).append( columns == null ? "null" : Arrays.asList( columns ).toString() );
+ sb.append( ", columns=" ).append( columnNames == null ? "null" : Arrays.asList( columnNames ).toString() );
sb.append( ", columnValues=" )
.append( columnValues == null ? "null" : Arrays.asList( columnValues ).toString() );
sb.append( '}' );
|
OGM-<I> Rename internal properties of RowKey
|
hibernate_hibernate-ogm
|
train
|
0c7275da1a31835d69d3f26b79980934112504c1
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -30,7 +30,6 @@ setup(
'httptools>=0.0.9',
'ujson>=1.35',
'aiofiles>=0.3.0',
- 'multidict>=2.0',
],
classifiers=[
'Development Status :: 2 - Pre-Alpha',
|
Remove multidict requirement
This is no longer necessary after #<I>.
|
huge-success_sanic
|
train
|
61067d06e2be3d121de10c51608bb9c8b1e4bad3
|
diff --git a/tensorflow_probability/python/experimental/vi/surrogate_posteriors.py b/tensorflow_probability/python/experimental/vi/surrogate_posteriors.py
index <HASH>..<HASH> 100644
--- a/tensorflow_probability/python/experimental/vi/surrogate_posteriors.py
+++ b/tensorflow_probability/python/experimental/vi/surrogate_posteriors.py
@@ -157,14 +157,7 @@ def build_factored_surrogate_posterior(
`tfd.TransformedDistribution(underlying_dist, bijector)` if a
corresponding constraining bijector is specified, otherwise it is modeled
as supported on the unconstrained real line.
- constraining_bijectors: Optional `tfb.Bijector` instance, or nested
- structure of such instances, defining support(s) of the posterior
- variables. The structure must match that of `event_shape` and may
- contain `None` values. A posterior variable will
- be modeled as `tfd.TransformedDistribution(underlying_dist,
- constraining_bijector)` if a corresponding constraining bijector is
- specified, otherwise it is modeled as supported on the
- unconstrained real line.
+ constraining_bijectors: Deprecated alias for `bijector`.
initial_unconstrained_loc: Optional Python `callable` with signature
`tensor = initial_unconstrained_loc(shape, seed)` used to sample
real-valued initializations for the unconstrained representation of each
|
Update docstring to reflect `constraining_bijectors` deprecation.
PiperOrigin-RevId: <I>
|
tensorflow_probability
|
train
|
c381d20b0a3ec8e47c3db95ad94167c0ac079094
|
diff --git a/src/com/google/javascript/refactoring/FixingErrorManager.java b/src/com/google/javascript/refactoring/FixingErrorManager.java
index <HASH>..<HASH> 100644
--- a/src/com/google/javascript/refactoring/FixingErrorManager.java
+++ b/src/com/google/javascript/refactoring/FixingErrorManager.java
@@ -16,8 +16,6 @@
package com.google.javascript.refactoring;
-import static com.google.javascript.jscomp.CheckMissingAndExtraRequires.MISSING_REQUIRE_STRICT_WARNING;
-import static com.google.javascript.jscomp.CheckMissingAndExtraRequires.MISSING_REQUIRE_WARNING;
import static com.google.javascript.jscomp.ClosureCheckModule.REFERENCE_TO_SHORT_IMPORT_BY_LONG_NAME_INCLUDING_SHORT_NAME;
import static com.google.javascript.jscomp.lint.CheckExtraRequires.EXTRA_REQUIRE_WARNING;
@@ -100,13 +98,10 @@ public class FixingErrorManager extends BasicErrorManager {
boolean containsFixableShorthandModuleWarning = containsFixableShorthandModuleWarning();
Collection<SuggestedFix> fixes = new ArrayList<>();
for (JSError error : getErrors()) {
- // Sometimes code will produce a spurious extra-require or missing-require error,
+ // Sometimes code will produce a spurious extra-require error,
// as well as a warning about using a full namespace instead of a shorthand type. In this case
- // don't apply the extra/missing require fix.
- if (containsFixableShorthandModuleWarning
- && (error.getType().equals(EXTRA_REQUIRE_WARNING)
- || error.getType().equals(MISSING_REQUIRE_STRICT_WARNING)
- || error.getType().equals(MISSING_REQUIRE_WARNING))) {
+ // don't apply the extra require fix.
+ if (containsFixableShorthandModuleWarning && error.getType().equals(EXTRA_REQUIRE_WARNING)) {
// Don't apply this fix.
} else {
if (fixTypes == FixTypes.ONE_FIX && sureFixes.containsKey(error)) {
|
Remove referneces to legacy CheckMissingAndExtraRequires pass
This is no longer used in the linter/autofixer and is in preparation for being
removed completely.
PiperOrigin-RevId: <I>
|
google_closure-compiler
|
train
|
ba6bb9131fddab42c45535210f4cd13a4a7bc419
|
diff --git a/suds/sax/__init__.py b/suds/sax/__init__.py
index <HASH>..<HASH> 100644
--- a/suds/sax/__init__.py
+++ b/suds/sax/__init__.py
@@ -1,24 +1,23 @@
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the (LGPL) GNU Lesser General Public License as
-# published by the Free Software Foundation; either version 3 of the
-# License, or (at your option) any later version.
+# This program is free software; you can redistribute it and/or modify it under
+# the terms of the (LGPL) GNU Lesser General Public License as published by the
+# Free Software Foundation; either version 3 of the License, or (at your
+# option) any later version.
#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Library Lesser General Public License for more details at
-# ( http://www.gnu.org/licenses/lgpl.html ).
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Library Lesser General Public License
+# for more details at ( http://www.gnu.org/licenses/lgpl.html ).
#
# You should have received a copy of the GNU Lesser General Public License
-# along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# written by: Jeff Ortel ( jortel@redhat.com )
"""
The sax module contains a collection of classes that provide a (D)ocument
(O)bject (M)odel representation of an XML document. The goal is to provide an
-easy, intuitive interface for managing XML documents. Although, the term, DOM,
-is used above, this model is B{far} better.
+easy, intuitive interface for managing XML documents. Although the term DOM is
+used here, this model is B{far} better.
XML namespaces in suds are represented using a (2) element tuple containing the
prefix and the URI, e.g. I{('tns', 'http://myns')}
@@ -26,13 +25,12 @@ prefix and the URI, e.g. I{('tns', 'http://myns')}
@var encoder: A I{pluggable} XML special character processor used to encode/
decode strings.
@type encoder: L{Encoder}
+
"""
from suds.sax.enc import Encoder
-#
# pluggable XML special character encoder.
-#
encoder = Encoder()
@@ -40,25 +38,25 @@ def splitPrefix(name):
"""
Split the name into a tuple (I{prefix}, I{name}). The first element in the
tuple is I{None} when the name does not have a prefix.
+
@param name: A node name containing an optional prefix.
@type name: basestring
- @return: A tuple containing the (2) parts of I{name}
+ @return: A tuple containing the (2) parts of I{name}.
@rtype: (I{prefix}, I{name})
+
"""
- if isinstance(name, basestring) and ':' in name:
- return tuple(name.split(':', 1))
+ if isinstance(name, basestring) and ":" in name:
+ return tuple(name.split(":", 1))
return None, name
class Namespace:
- """
- The namespace class represents XML namespaces.
- """
+ """XML namespace."""
default = (None, None)
- xmlns = ('xml', 'http://www.w3.org/XML/1998/namespace')
- xsdns = ('xs', 'http://www.w3.org/2001/XMLSchema')
- xsins = ('xsi', 'http://www.w3.org/2001/XMLSchema-instance')
+ xmlns = ("xml", "http://www.w3.org/XML/1998/namespace")
+ xsdns = ("xs", "http://www.w3.org/2001/XMLSchema")
+ xsins = ("xsi", "http://www.w3.org/2001/XMLSchema-instance")
all = (xsdns, xsins)
@classmethod
@@ -72,7 +70,7 @@ class Namespace:
@classmethod
def xsd(cls, ns):
try:
- return cls.w3(ns) and ns[1].endswith('XMLSchema')
+ return cls.w3(ns) and ns[1].endswith("XMLSchema")
except Exception:
pass
return False
@@ -80,7 +78,7 @@ class Namespace:
@classmethod
def xsi(cls, ns):
try:
- return cls.w3(ns) and ns[1].endswith('XMLSchema-instance')
+ return cls.w3(ns) and ns[1].endswith("XMLSchema-instance")
except Exception:
pass
return False
@@ -92,7 +90,7 @@ class Namespace:
@classmethod
def w3(cls, ns):
try:
- return ns[1].startswith('http://www.w3.org')
+ return ns[1].startswith("http://www.w3.org")
except Exception:
pass
return False
|
tidy up up the suds.sax module (stylistic)
- PEP-8ified comments
- used double quotes consistently
|
suds-community_suds
|
train
|
654444f8232649c2ecd7c63e333183e63e2d2777
|
diff --git a/hazelcast/src/main/java/com/hazelcast/internal/dynamicconfig/DynamicConfigReplicationOperation.java b/hazelcast/src/main/java/com/hazelcast/internal/dynamicconfig/DynamicConfigReplicationOperation.java
index <HASH>..<HASH> 100644
--- a/hazelcast/src/main/java/com/hazelcast/internal/dynamicconfig/DynamicConfigReplicationOperation.java
+++ b/hazelcast/src/main/java/com/hazelcast/internal/dynamicconfig/DynamicConfigReplicationOperation.java
@@ -20,6 +20,7 @@ import com.hazelcast.config.ConfigDataSerializerHook;
import com.hazelcast.nio.ObjectDataInput;
import com.hazelcast.nio.ObjectDataOutput;
import com.hazelcast.nio.serialization.IdentifiedDataSerializable;
+import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import java.io.IOException;
@@ -28,6 +29,7 @@ public class DynamicConfigReplicationOperation extends AbstractDynamicConfigOper
private IdentifiedDataSerializable[] configs;
private ConfigCheckMode configCheckMode;
+ @SuppressFBWarnings("EI_EXPOSE_REP")
public DynamicConfigReplicationOperation(IdentifiedDataSerializable[] configs, ConfigCheckMode configCheckMode) {
this.configs = configs;
this.configCheckMode = configCheckMode;
|
Findbugs suppression.
Reasoning:
It's intentional. The service always creates a new array. And items
inside the array are immutable.
|
hazelcast_hazelcast
|
train
|
d95e32f97f7f6a5bdee891bac5372f2719229eb5
|
diff --git a/Controller/DefaultController.php b/Controller/DefaultController.php
index <HASH>..<HASH> 100644
--- a/Controller/DefaultController.php
+++ b/Controller/DefaultController.php
@@ -266,12 +266,11 @@ class DefaultController extends Controller
*/
public function removeServicoUnidade(Request $request, Servico $servico, TranslatorInterface $translator)
{
+ $em = $this->getDoctrine()->getManager();
$unidade = $this->getUser()->getLotacao()->getUnidade();
$envelope = new Envelope();
- $su = $this
- ->getDoctrine()
- ->getManager()
+ $su = $em
->getRepository(ServicoUnidade::class)
->get($unidade, $servico);
@@ -283,21 +282,31 @@ class DefaultController extends Controller
throw new Exception($translator->trans('error.cannot_remove_disabled_service', [], self::DOMAIN));
}
- $contador = $this
- ->getDoctrine()
- ->getManager()
- ->getRepository(Contador::class)
- ->findOneBy([
- 'unidade' => $unidade,
- 'servico' => $servico,
- ]);
-
- $em = $this->getDoctrine()->getManager();
- $em->remove($su);
- if ($contador) {
- $em->remove($contador);
- }
- $em->flush();
+ $em->transactional(function ($em) use ($su, $unidade, $servico) {
+ $em->remove($su);
+
+ $em
+ ->createQueryBuilder()
+ ->delete(Contador::class, 'e')
+ ->where('e.unidade = :unidade AND e.servico = :servico')
+ ->setParameters([
+ 'unidade' => $unidade,
+ 'servico' => $servico,
+ ])
+ ->getQuery()
+ ->execute();
+
+ $em
+ ->createQueryBuilder()
+ ->delete(ServicoUsuario::class, 'e')
+ ->where('e.unidade = :unidade AND e.servico = :servico')
+ ->setParameters([
+ 'unidade' => $unidade,
+ 'servico' => $servico,
+ ])
+ ->getQuery()
+ ->execute();
+ });
return $this->json($envelope);
}
|
removing user x unit x service
|
novosga_settings-bundle
|
train
|
ff334dfa7d1687a923f476db1166acd47e7ee059
|
diff --git a/src/javascripts/frigging_bootstrap/components/timepicker.js b/src/javascripts/frigging_bootstrap/components/timepicker.js
index <HASH>..<HASH> 100644
--- a/src/javascripts/frigging_bootstrap/components/timepicker.js
+++ b/src/javascripts/frigging_bootstrap/components/timepicker.js
@@ -11,12 +11,19 @@ export default class extends React.Component {
_input() {
return input(Object.assign({}, this.props.inputHtml, {
- valueLink: this.props.valueLink,
- className: cx(this.props.inputHtml.className, "form-control"),
+ valueLink: {
+ value: this.props.valueLink.value,
+ requestChange: this._onTimeChange,
+ },
+ className: cx(this.props.inputHtml.className, "form-control"),
})
)
}
+ _onTimeChange(newTime) {
+ console.log(`New Time ${newTime}`)
+ }
+
render() {
return div({className: cx(sizeClassNames(this.props))},
div({className: formGroupCx(this.props)},
|
Add ValueLink To On Change For Timepicker
|
frig-js_frig
|
train
|
927786ebf01876a44c9018599d2451c0f5a39d22
|
diff --git a/src/Gdbots/Pbj/Serializer/YamlSerializer.php b/src/Gdbots/Pbj/Serializer/YamlSerializer.php
index <HASH>..<HASH> 100644
--- a/src/Gdbots/Pbj/Serializer/YamlSerializer.php
+++ b/src/Gdbots/Pbj/Serializer/YamlSerializer.php
@@ -13,7 +13,19 @@ class YamlSerializer extends PhpArraySerializer
*/
public function serialize(Message $message, array $options = [])
{
- return Yaml::dump(parent::serialize($message, $options));
+ if (!isset($options['yaml_inline'])) {
+ $options['yaml_inline'] = 3;
+ }
+
+ if (!isset($options['yaml_indent'])) {
+ $options['yaml_indent'] = 2;
+ }
+
+ return Yaml::dump(
+ parent::serialize($message, $options),
+ (int) $options['yaml_inline'],
+ (int) $options['yaml_indent']
+ );
}
/**
|
go yaml inline at 3 levels
|
gdbots_pbj-php
|
train
|
32953f37b638dfc3dae56bd127b930aae1b90a9c
|
diff --git a/mod/quiz/attemptlib.php b/mod/quiz/attemptlib.php
index <HASH>..<HASH> 100644
--- a/mod/quiz/attemptlib.php
+++ b/mod/quiz/attemptlib.php
@@ -441,9 +441,19 @@ class quiz_attempt {
protected $quizobj;
protected $attempt;
- // More details of what happened for each question.
+ /** @var question_usage_by_activity the question usage for this quiz attempt. */
protected $quba;
- protected $pagelayout; // Array page no => array of numbers on the page in order.
+
+ /** @var array page no => array of slot numbers on the page in order. */
+ protected $pagelayout;
+
+ /** @var array slot => displayed question number for this slot. (E.g. 1, 2, 3 or 'i'.) */
+ protected $questionnumbers;
+
+ /** @var array slot => page number for this slot. */
+ protected $questionpages;
+
+ /** @var mod_quiz_display_options cache for the appropriate review options. */
protected $reviewoptions = null;
// Constructor =============================================================
@@ -545,12 +555,12 @@ class quiz_attempt {
foreach ($slots as $slot) {
$question = $this->quba->get_question($slot);
if ($question->length > 0) {
- $question->_number = $number;
+ $this->questionnumbers[$slot] = $number;
$number += $question->length;
} else {
- $question->_number = get_string('infoshort', 'quiz');
+ $this->questionnumbers[$slot] = get_string('infoshort', 'quiz');
}
- $question->_page = $page;
+ $this->questionpages[$slot] = $page;
}
}
}
@@ -906,16 +916,20 @@ class quiz_attempt {
}
/**
- * Return the grade obtained on a particular question, if the user is permitted
- * to see it. You must previously have called load_question_states to load the
- * state data about this question.
- *
* @param int $slot the number used to identify this question within this attempt.
- * @return string the formatted grade, to the number of decimal places specified
- * by the quiz.
+ * @return string the displayed question number for the question in this slot.
+ * For example '1', '2', '3' or 'i'.
*/
public function get_question_number($slot) {
- return $this->quba->get_question($slot)->_number;
+ return $this->questionnumbers[$slot];
+ }
+
+ /**
+ * @param int $slot the number used to identify this question within this attempt.
+ * @return int the page of the quiz this question appears on.
+ */
+ public function get_question_page($slot) {
+ return $this->questionpages[$slot];
}
/**
@@ -1047,7 +1061,7 @@ class quiz_attempt {
*/
public function start_attempt_url($slot = null, $page = -1) {
if ($page == -1 && !is_null($slot)) {
- $page = $this->quba->get_question($slot)->_page;
+ $page = $this->get_question_page($slot);
} else {
$page = 0;
}
@@ -1162,7 +1176,7 @@ class quiz_attempt {
public function render_question($slot, $reviewing, $thispageurl = null) {
return $this->quba->render_question($slot,
$this->get_display_options_with_edit_link($reviewing, $slot, $thispageurl),
- $this->quba->get_question($slot)->_number);
+ $this->get_question_number($slot));
}
/**
@@ -1178,7 +1192,7 @@ class quiz_attempt {
public function render_question_at_step($slot, $seq, $reviewing, $thispageurl = '') {
return $this->quba->render_question_at_step($slot, $seq,
$this->get_display_options($reviewing),
- $this->quba->get_question($slot)->_number);
+ $this->get_question_number($slot));
}
/**
@@ -1191,7 +1205,7 @@ class quiz_attempt {
$options->hide_all_feedback();
$options->manualcomment = question_display_options::EDITABLE;
return $this->quba->render_question($slot, $options,
- $this->quba->get_question($slot)->_number);
+ $this->get_question_number($slot));
}
/**
@@ -1483,7 +1497,7 @@ class quiz_attempt {
// Fix up $page.
if ($page == -1) {
if (!is_null($slot) && !$showall) {
- $page = $this->quba->get_question($slot)->_page;
+ $page = $this->get_question_page($slot);
} else {
$page = 0;
}
@@ -1574,14 +1588,14 @@ abstract class quiz_nav_panel_base {
$button = new quiz_nav_question_button();
$button->id = 'quiznavbutton' . $slot;
- $button->number = $qa->get_question()->_number;
+ $button->number = $this->attemptobj->get_question_number($slot);
$button->stateclass = $qa->get_state_class($showcorrectness);
$button->navmethod = $this->attemptobj->get_navigation_method();
if (!$showcorrectness && $button->stateclass == 'notanswered') {
$button->stateclass = 'complete';
}
$button->statestring = $this->get_state_string($qa, $showcorrectness);
- $button->currentpage = $qa->get_question()->_page == $this->page;
+ $button->currentpage = $this->attemptobj->get_question_page($slot) == $this->page;
$button->flagged = $qa->is_flagged();
$button->url = $this->get_question_url($slot);
$buttons[] = $button;
|
MDL-<I> quiz cleanup: remove ugly question/page number hack
It used to be necessary to store the question number and question page
by adding them as random extra fields on the question object (but
I can no longer remember what that reason was). Now it is possible
to store this sensibly in the quiz_attempt object, which is much
cleaner, so do that.
|
moodle_moodle
|
train
|
4bc6ecb2415e6fa3e750fa8b67059e6804952059
|
diff --git a/pkg/api/http_server.go b/pkg/api/http_server.go
index <HASH>..<HASH> 100644
--- a/pkg/api/http_server.go
+++ b/pkg/api/http_server.go
@@ -170,7 +170,6 @@ func (hs *HttpServer) newMacaron() *macaron.Macaron {
m.Use(hs.metricsEndpoint)
m.Use(middleware.GetContextHandler())
m.Use(middleware.Sessioner(&setting.SessionOptions))
- m.Use(middleware.RequestMetrics())
m.Use(middleware.OrgRedirect())
// needs to be after context handler
diff --git a/pkg/api/route_register.go b/pkg/api/route_register.go
index <HASH>..<HASH> 100644
--- a/pkg/api/route_register.go
+++ b/pkg/api/route_register.go
@@ -3,6 +3,7 @@ package api
import (
"net/http"
+ "github.com/grafana/grafana/pkg/middleware"
macaron "gopkg.in/macaron.v1"
)
@@ -68,13 +69,15 @@ func (rr *routeRegister) Register(router Router) *macaron.Router {
}
func (rr *routeRegister) route(pattern, method string, handlers ...macaron.Handler) {
- //inject metrics
//inject tracing
+ h := append(rr.subfixHandlers, handlers...)
+ h = append([]macaron.Handler{middleware.RequestMetrics(pattern)}, h...)
+
rr.routes = append(rr.routes, route{
method: method,
pattern: rr.prefix + pattern,
- handlers: append(rr.subfixHandlers, handlers...),
+ handlers: h,
})
}
diff --git a/pkg/metrics/graphitebridge/graphite.go b/pkg/metrics/graphitebridge/graphite.go
index <HASH>..<HASH> 100644
--- a/pkg/metrics/graphitebridge/graphite.go
+++ b/pkg/metrics/graphitebridge/graphite.go
@@ -54,7 +54,6 @@ const (
)
var metricCategoryPrefix []string = []string{"proxy_", "api_", "page_", "alerting_", "aws_", "db_", "stat_", "go_", "process_"}
-var ignorePrefix []string = []string{"http_"}
// Config defines the Graphite bridge config.
type Config struct {
@@ -206,18 +205,6 @@ func (b *Bridge) writeMetrics(w io.Writer, mfs []*dto.MetricFamily, prefix strin
return err
}
- ignoreThisMetric := false
- for _, v := range ignorePrefix {
- if strings.HasPrefix(mf.GetName(), v) {
- ignoreThisMetric = true
- break
- }
- }
-
- if ignoreThisMetric {
- continue
- }
-
buf := bufio.NewWriter(w)
for _, s := range vec {
if err := writePrefix(buf, prefix); err != nil {
diff --git a/pkg/metrics/metrics.go b/pkg/metrics/metrics.go
index <HASH>..<HASH> 100644
--- a/pkg/metrics/metrics.go
+++ b/pkg/metrics/metrics.go
@@ -91,7 +91,7 @@ func init() {
Name: "http_request_total",
Help: "http request counter",
},
- []string{"code", "method"},
+ []string{"handler", "statuscode", "method"},
)
M_Http_Request_Summary = prometheus.NewSummaryVec(
@@ -99,7 +99,7 @@ func init() {
Name: "http_request_duration",
Help: "http request summary",
},
- []string{"code", "method"},
+ []string{"handler", "statuscode", "method"},
)
M_Api_User_SignUpStarted = prometheus.NewCounter(prometheus.CounterOpts{
diff --git a/pkg/middleware/request_metrics.go b/pkg/middleware/request_metrics.go
index <HASH>..<HASH> 100644
--- a/pkg/middleware/request_metrics.go
+++ b/pkg/middleware/request_metrics.go
@@ -10,7 +10,7 @@ import (
"gopkg.in/macaron.v1"
)
-func RequestMetrics() macaron.Handler {
+func RequestMetrics(handler string) macaron.Handler {
return func(res http.ResponseWriter, req *http.Request, c *macaron.Context) {
rw := res.(macaron.ResponseWriter)
now := time.Now()
@@ -20,8 +20,8 @@ func RequestMetrics() macaron.Handler {
code := sanitizeCode(status)
method := sanitizeMethod(req.Method)
- metrics.M_Http_Request_Total.WithLabelValues(code, method).Inc()
- metrics.M_Http_Request_Summary.WithLabelValues(code, method).Observe(time.Since(now).Seconds())
+ metrics.M_Http_Request_Total.WithLabelValues(handler, code, method).Inc()
+ metrics.M_Http_Request_Summary.WithLabelValues(handler, code, method).Observe(time.Since(now).Seconds())
if strings.HasPrefix(req.RequestURI, "/api/datasources/proxy") {
countProxyRequests(status)
|
adds metric middleware to route register
|
grafana_grafana
|
train
|
7a7d31bf34900de57285749c553b258fefb4465f
|
diff --git a/lib/Models/ArcGisCatalogGroup.js b/lib/Models/ArcGisCatalogGroup.js
index <HASH>..<HASH> 100644
--- a/lib/Models/ArcGisCatalogGroup.js
+++ b/lib/Models/ArcGisCatalogGroup.js
@@ -3,18 +3,14 @@
/*global require*/
var URI = require('URIjs');
-var Cartesian3 = require('terriajs-cesium/Source/Core/Cartesian3');
var clone = require('terriajs-cesium/Source/Core/clone');
var defined = require('terriajs-cesium/Source/Core/defined');
var definedNotNull = require('terriajs-cesium/Source/Core/definedNotNull');
var defineProperties = require('terriajs-cesium/Source/Core/defineProperties');
-var Ellipsoid = require('terriajs-cesium/Source/Core/Ellipsoid');
var freezeObject = require('terriajs-cesium/Source/Core/freezeObject');
var knockout = require('terriajs-cesium/Source/ThirdParty/knockout');
var loadJson = require('terriajs-cesium/Source/Core/loadJson');
-var Rectangle = require('terriajs-cesium/Source/Core/Rectangle');
var when = require('terriajs-cesium/Source/ThirdParty/when');
-var WebMercatorProjection = require('terriajs-cesium/Source/Core/WebMercatorProjection');
var ModelError = require('./ModelError');
var CatalogGroup = require('./CatalogGroup');
|
Fix jshint warnings.
|
TerriaJS_terriajs
|
train
|
b4052a7577c35755098de9d9cec8f637c0eba892
|
diff --git a/lib/genevalidator.rb b/lib/genevalidator.rb
index <HASH>..<HASH> 100644
--- a/lib/genevalidator.rb
+++ b/lib/genevalidator.rb
@@ -413,6 +413,32 @@ module GeneValidator
fail NoValidationError if query_output.validations.length == 0
# compute validation score
+ compute_scores(query_output)
+ query_output
+
+ rescue ValidationClassError => error
+ $stderr.print "Class Type error at #{error.backtrace[0].scan(/\/([^\/]+:\d+):.*/)[0][0]}. "\
+ "Possible cause: type of one of the validations is not ValidationTest\n"
+ exit 1
+ rescue NoValidationError => error
+ $stderr.print "Validation error at #{error.backtrace[0].scan(/\/([^\/]+:\d+):.*/)[0][0]}. "\
+ "Possible cause: your -v arguments are not valid aliases\n"
+ exit 1
+ rescue ReportClassError => error
+ $stderr.print "Class Type error at #{error.backtrace[0].scan(/\/([^\/]+:\d+):.*/)[0][0]}. "\
+ "Possible cause: type of one of the validation reports returned by the 'run' method is not ValidationReport\n"
+ exit 1
+ rescue AliasDuplicationError => error
+ $stderr.print "Alias Duplication error at #{error.backtrace[0].scan(/\/([^\/]+:\d+):.*/)[0][0]}. "\
+ "Possible cause: At least two validations have the same CLI alias\n"
+ exit 1
+ rescue Exception => error
+ puts error.backtrace
+ $stderr.print "Error at #{error.backtrace[0].scan(/\/([^\/]+:\d+):.*/)[0][0]}.\n"
+ exit 1
+ end
+
+ def compute_scores(query_output)
validations = query_output.validations
successes = validations.map { |v| v.result == v.expected }.count(true)
@@ -439,29 +465,6 @@ module GeneValidator
query_output.successes = successes
query_output.fails = fails
query_output.overall_score = (successes * 100 / (successes + fails + 0.0)).round(0)
-
- query_output
-
- rescue ValidationClassError => error
- $stderr.print "Class Type error at #{error.backtrace[0].scan(/\/([^\/]+:\d+):.*/)[0][0]}. "\
- "Possible cause: type of one of the validations is not ValidationTest\n"
- exit 1
- rescue NoValidationError => error
- $stderr.print "Validation error at #{error.backtrace[0].scan(/\/([^\/]+:\d+):.*/)[0][0]}. "\
- "Possible cause: your -v arguments are not valid aliases\n"
- exit 1
- rescue ReportClassError => error
- $stderr.print "Class Type error at #{error.backtrace[0].scan(/\/([^\/]+:\d+):.*/)[0][0]}. "\
- "Possible cause: type of one of the validation reports returned by the 'run' method is not ValidationReport\n"
- exit 1
- rescue AliasDuplicationError => error
- $stderr.print "Alias Duplication error at #{error.backtrace[0].scan(/\/([^\/]+:\d+):.*/)[0][0]}. "\
- "Possible cause: At least two validations have the same CLI alias\n"
- exit 1
- rescue Exception => error
- puts error.backtrace
- $stderr.print "Error at #{error.backtrace[0].scan(/\/([^\/]+:\d+):.*/)[0][0]}.\n"
- exit 1
end
end
end
|
refactor do_validation method
|
wurmlab_genevalidator
|
train
|
e7263884ec7c40af93a92ff523d771a0f148c871
|
diff --git a/languagetool-language-modules/de/src/main/java/org/languagetool/rules/de/GermanSpellerRule.java b/languagetool-language-modules/de/src/main/java/org/languagetool/rules/de/GermanSpellerRule.java
index <HASH>..<HASH> 100644
--- a/languagetool-language-modules/de/src/main/java/org/languagetool/rules/de/GermanSpellerRule.java
+++ b/languagetool-language-modules/de/src/main/java/org/languagetool/rules/de/GermanSpellerRule.java
@@ -2015,8 +2015,10 @@ public class GermanSpellerRule extends CompoundAwareHunspellRule {
String word = words.get(idx);
String nextWord = getWordAfterEnumerationOrNull(words, idx+1);
nextWord = StringUtils.removeEnd(nextWord, ".");
-
- boolean isCompound = nextWord != null && (compoundTokenizer.tokenize(nextWord).size() > 1 || nextWord.indexOf('-') > 0);
+ boolean isCompound = nextWord != null &&
+ (compoundTokenizer.tokenize(nextWord).size() > 1 ||
+ nextWord.indexOf('-') > 0 ||
+ nextWord.matches("[A-ZÖÄÜ][a-zöäüß]{2,}(ei|öl)$")); // compound tokenizer will only split compounds where each part is >= 3 characters...
if (isCompound) {
word = StringUtils.removeEnd(word, "-");
boolean isMisspelled = !hunspell.spell(word); // "Stil- und Grammatikprüfung" or "Stil-, Text- und Grammatikprüfung"
diff --git a/languagetool-language-modules/de/src/test/java/org/languagetool/rules/de/GermanSpellerRuleTest.java b/languagetool-language-modules/de/src/test/java/org/languagetool/rules/de/GermanSpellerRuleTest.java
index <HASH>..<HASH> 100644
--- a/languagetool-language-modules/de/src/test/java/org/languagetool/rules/de/GermanSpellerRuleTest.java
+++ b/languagetool-language-modules/de/src/test/java/org/languagetool/rules/de/GermanSpellerRuleTest.java
@@ -581,6 +581,7 @@ public class GermanSpellerRuleTest {
assertEquals(0, rule.match(lt.getAnalyzedSentence("Ist doch - gut")).length);
assertEquals(0, rule.match(lt.getAnalyzedSentence("Ist doch -- gut")).length);
assertEquals(0, rule.match(lt.getAnalyzedSentence("Stil- und Grammatikprüfung gut")).length);
+ assertEquals(0, rule.match(lt.getAnalyzedSentence("Oliven- und Mandelöl")).length);
assertEquals(0, rule.match(lt.getAnalyzedSentence("Stil-, Text- und Grammatikprüfung gut")).length);
assertEquals(0, rule.match(lt.getAnalyzedSentence("Er liebt die Stil-, Text- und Grammatikprüfung.")).length);
assertEquals(0, rule.match(lt.getAnalyzedSentence("Stil-, Text- und Grammatikprüfung")).length);
|
[de] fix false alarm with rare compounds where one part is only 2 characters long (#<I>)
|
languagetool-org_languagetool
|
train
|
867f60347222ff31e747998e2632d520cf1f9ea3
|
diff --git a/bridgepoint/ooaofooa.py b/bridgepoint/ooaofooa.py
index <HASH>..<HASH> 100644
--- a/bridgepoint/ooaofooa.py
+++ b/bridgepoint/ooaofooa.py
@@ -3996,3 +3996,20 @@ def empty_model():
loader = Loader()
return loader.build_metamodel()
+
+def load_model(resource):
+ '''
+ Load and return a model from a resource.
+ The resource may be either a filename, a path, or a list of filenames
+ and/or paths.
+ '''
+ if isinstance(resource, str):
+ resource = [resource]
+
+ loader = Loader()
+ for filename in resource:
+ loader.filename_input(filename)
+
+ return loader.build_metamodel()
+
+
diff --git a/examples/list_bp_class_attributes.py b/examples/list_bp_class_attributes.py
index <HASH>..<HASH> 100644
--- a/examples/list_bp_class_attributes.py
+++ b/examples/list_bp_class_attributes.py
@@ -16,9 +16,8 @@ if len(sys.argv) < 2:
sys.exit(1)
-loader = ooaofooa.Loader()
-loader.filename_input(sys.argv[1])
-m = loader.build_metamodel()
+m = ooaofooa.load_model(sys.argv[1])
+
by_name = lambda inst: inst.Name
for o_obj in sorted(m.select_many('O_OBJ'), key=by_name):
diff --git a/examples/list_bp_enums.py b/examples/list_bp_enums.py
index <HASH>..<HASH> 100644
--- a/examples/list_bp_enums.py
+++ b/examples/list_bp_enums.py
@@ -15,9 +15,7 @@ if len(sys.argv) < 2:
sys.exit(1)
-loader = ooaofooa.Loader()
-loader.filename_input(sys.argv[1])
-m = loader.build_metamodel()
+m = ooaofooa.load_model(sys.argv[1])
get_name = lambda inst: one(inst).S_DT[17]().Name
|
ooaofooa: added a convenience function for loading a model from some resource.
|
xtuml_pyxtuml
|
train
|
50ef6c0b28f2aa4ad79e7c8895553a5c59581eba
|
diff --git a/lib/actions/create.js b/lib/actions/create.js
index <HASH>..<HASH> 100644
--- a/lib/actions/create.js
+++ b/lib/actions/create.js
@@ -12,7 +12,6 @@ module.exports = function (description, done) {
if (!description) return done(new Error('Missing parameter: description'));
async.waterfall([
migrationsDir.shouldExist,
- configFile.shouldExist,
function (taskDone) {
description = description.split(' ').join('_'); // replace spaces with underscores
var source = path.join(__dirname, '../../samples/migration.js');
diff --git a/test/create.test.js b/test/create.test.js
index <HASH>..<HASH> 100644
--- a/test/create.test.js
+++ b/test/create.test.js
@@ -47,17 +47,9 @@ describe('create', function () {
});
});
- it('should check that the config file exists', function (done) {
+ it('should not be necessary to have an config file present', function (done) {
create('my_description', function () {
- expect(configFile.shouldExist.called).to.equal(true);
- done();
- });
- });
-
- it('should yield an error when config file does not exist', function (done) {
- configFile.shouldExist.yields(new Error('config file does not exist'));
- create('my_description', function (err) {
- expect(err.message).to.equal('config file does not exist');
+ expect(configFile.shouldExist.called).to.equal(false);
done();
});
});
|
do not check for the presence of the config file when running the 'create' command
|
seppevs_migrate-mongo
|
train
|
d2c3eacc325e2b9d4546e8679ea467379c5ac1b5
|
diff --git a/abilian/core/commands/indexing.py b/abilian/core/commands/indexing.py
index <HASH>..<HASH> 100644
--- a/abilian/core/commands/indexing.py
+++ b/abilian/core/commands/indexing.py
@@ -65,23 +65,23 @@ def reindex(clear=False):
progress.start()
count_current = 0
- with writer.group():
- for obj in q.yield_per(1000):
- if obj.object_type != current_object_type:
- # may happen if obj is a subclass and mother class is indexable
- continue
-
- object_key = obj.object_key
-
- if object_key in indexed:
- continue
- document = adapter.get_document(obj)
- writer.add_document(**document)
- indexed.add(object_key)
- count_current += 1
- try:
- progress.update(count_current)
- except ValueError:
- pass
+ for obj in q.yield_per(1000):
+ if obj.object_type != current_object_type:
+ # may happen if obj is a subclass and its parent class is also
+ # indexable
+ continue
+
+ object_key = obj.object_key
+
+ if object_key in indexed:
+ continue
+ document = svc.get_document(obj, adapter)
+ writer.add_document(**document)
+ indexed.add(object_key)
+ count_current += 1
+ try:
+ progress.update(count_current)
+ except ValueError:
+ pass
progress.finish()
diff --git a/abilian/services/indexing/service.py b/abilian/services/indexing/service.py
index <HASH>..<HASH> 100644
--- a/abilian/services/indexing/service.py
+++ b/abilian/services/indexing/service.py
@@ -296,6 +296,24 @@ class WhooshIndexService(Service):
index_update.apply_async(kwargs=dict(index='default', items=items))
self.clear_update_queue()
+ def get_document(self, obj, adapter=None):
+ """
+ """
+ if adapter is None:
+ class_name = fqcn(obj.__class__)
+ adapter = self.adapted.get(class_name)
+
+ if adapter is None or not adapter.indexable:
+ return None
+
+ document = adapter.get_document(obj)
+
+ if not document.get('allowed_roles_and_users'):
+ # no data for security: assume anybody can access the document
+ document['allowed_roles_and_users'] = indexable_role(Anonymous)
+
+ return document
+
def index_objects(self, objects, index='default'):
"""
Bulk index a list of objects.
@@ -305,19 +323,21 @@ class WhooshIndexService(Service):
index_name = index
index = self.app_state.indexes[index_name]
+ indexed = set()
with index.writer() as writer:
for obj in objects:
- model_name = fqcn(obj.__class__)
- adapter = self.adapted.get(model_name)
-
- if adapter is None or not adapter.indexable:
+ document = self.get_document(obj)
+ if document is None:
continue
- document = adapter.get_document(obj)
object_key = document['object_key']
+ if object_key in indexed:
+ continue
+
writer.delete_by_term('object_key', object_key)
writer.add_document(**document)
+ indexed.add(object_key)
service = WhooshIndexService()
@@ -369,7 +389,7 @@ def index_update(index, items):
# for key in indexed_fields:
# getattr(obj, key, None)
- document = adapter.get_document(obj)
+ document = service.get_document(obj, adapter)
writer.add_document(**document)
updated.add(object_key)
|
indexing: use get_document() defined on service object: it ensures all objects have a value for allowed_roles_and_users
|
abilian_abilian-core
|
train
|
7e63ff76ed1f565d1ace355dd69bc59236cf843c
|
diff --git a/javamelody-core/src/main/java/net/bull/javamelody/internal/model/MavenArtifact.java b/javamelody-core/src/main/java/net/bull/javamelody/internal/model/MavenArtifact.java
index <HASH>..<HASH> 100644
--- a/javamelody-core/src/main/java/net/bull/javamelody/internal/model/MavenArtifact.java
+++ b/javamelody-core/src/main/java/net/bull/javamelody/internal/model/MavenArtifact.java
@@ -572,6 +572,11 @@ public final class MavenArtifact implements Serializable {
}
private static File getMavenArtifact(String filePath) throws IOException {
+ if (filePath.contains("${")) {
+ // si le chemin contient des variables non résolues telles que ${project.version},
+ // ce n'est pas la peine de chercher
+ return null;
+ }
final File storageDirectory = Parameters
.getStorageDirectory(Parameters.getCurrentApplication());
final String subDirectory;
|
if path contains unresolved variables, it is useless to search
|
javamelody_javamelody
|
train
|
5a9d604b23064cbfc8baefde99aca60a97d1f280
|
diff --git a/src/main/java/com/basistech/tclre/Compiler.java b/src/main/java/com/basistech/tclre/Compiler.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/basistech/tclre/Compiler.java
+++ b/src/main/java/com/basistech/tclre/Compiler.java
@@ -55,6 +55,7 @@ class Compiler {
static final int SOME = 2;
static final int INF = 3;
private static final Logger LOG = LoggerFactory.getLogger(Compiler.class);
+ private static final boolean isDebug = System.getProperty("tclre.debug") != null;
char[] pattern;
int now; /* scan pointer into string */
int stop; /* end of string */
@@ -185,7 +186,7 @@ class Compiler {
/* finish setup of nfa and its subre tree */
nfa.specialcolors();
- if (LOG.isDebugEnabled()) {
+ if (LOG.isDebugEnabled() && isDebug) {
LOG.debug("========= RAW ==========");
nfa.dumpnfa();
LOG.debug(tree.dumpst(true));
@@ -196,7 +197,7 @@ class Compiler {
markst(tree);
cleanst();
- if (LOG.isDebugEnabled()) {
+ if (LOG.isDebugEnabled() && isDebug) {
LOG.debug("========= TREE FIXED ==========");
LOG.debug(tree.dumpst(true));
}
@@ -215,7 +216,7 @@ class Compiler {
}
/* build compacted NFAs for tree, lacons, fast search */
- if (LOG.isDebugEnabled()) {
+ if (LOG.isDebugEnabled() && isDebug) {
LOG.debug("========= SEARCH ==========");
}
/* can sacrifice main NFA now, so use it as work area */
@@ -508,7 +509,7 @@ class Compiler {
assert t.begin != null;
- if (LOG.isDebugEnabled()) {
+ if (LOG.isDebugEnabled() && isDebug) {
LOG.debug(String.format("========= TREE NODE %s ==========", t.shortId()));
}
@@ -1477,7 +1478,7 @@ class Compiler {
/*
* Note: ICU operates in UTF-32 here, and the ColorMap is happy to play along.
*/
- if (LOG.isDebugEnabled()) {
+ if (LOG.isDebugEnabled() && isDebug) {
LOG.debug(String.format("%s %d %4x %4x", set, rx, rangeStart, rangeEnd));
}
//TODO: this arc is probably redundant.
diff --git a/src/main/java/com/basistech/tclre/Nfa.java b/src/main/java/com/basistech/tclre/Nfa.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/basistech/tclre/Nfa.java
+++ b/src/main/java/com/basistech/tclre/Nfa.java
@@ -24,6 +24,8 @@ import org.slf4j.LoggerFactory;
*/
class Nfa {
private static final Logger LOG = LoggerFactory.getLogger(Nfa.class);
+ private static final boolean isDebug = System.getProperty("tclre.debug") != null;
+
private static final int INCOMPATIBLE = 1;
private static final int SATISFIED = 2;
private static final int COMPATIBLE = 3;
@@ -383,7 +385,7 @@ class Nfa {
* dumpnfa - dump an NFA in human-readable form
*/
void dumpnfa() {
- if (!LOG.isDebugEnabled()) {
+ if (!LOG.isDebugEnabled() || !isDebug) {
return;
}
@@ -420,7 +422,7 @@ class Nfa {
void dumpstate(State s) {
Arc a;
- if (!LOG.isDebugEnabled()) {
+ if (!LOG.isDebugEnabled() || !isDebug) {
return;
}
|
TEJ-<I>: added guards to isDebugEnabled() since log4j 1.x defaults to ALL if not initialized.
|
basis-technology-corp_tcl-regex-java
|
train
|
1b7df6574b2e42fbe6fb2d2ec437af86a2cd4f19
|
diff --git a/webui/webui.go b/webui/webui.go
index <HASH>..<HASH> 100644
--- a/webui/webui.go
+++ b/webui/webui.go
@@ -33,7 +33,7 @@ func NewServer(namespace string, pool *redis.Pool, hostPort string) *WebUIServer
pool: pool,
client: work.NewClient(namespace, pool),
hostPort: hostPort,
- server: manners.NewServer(),
+ server: manners.NewWithServer(&http.Server{Addr: hostPort, Handler: router}),
router: router,
}
@@ -60,13 +60,13 @@ func NewServer(namespace string, pool *redis.Pool, hostPort string) *WebUIServer
func (w *WebUIServer) Start() {
w.wg.Add(1)
go func(w *WebUIServer) {
- w.server.ListenAndServe(w.hostPort, w.router)
+ w.server.ListenAndServe()
w.wg.Done()
}(w)
}
func (w *WebUIServer) Stop() {
- w.server.Shutdown <- true
+ w.server.Close()
w.wg.Wait()
}
|
Update manners to use new version.
|
gocraft_work
|
train
|
5d69089c657356c2c301ad1dc5a57b2c5d963535
|
diff --git a/monica/config.py b/monica/config.py
index <HASH>..<HASH> 100644
--- a/monica/config.py
+++ b/monica/config.py
@@ -45,7 +45,10 @@ def configure():
sys.exit()
try:
+ flag = True
config = json.loads(open(os.path.join(_ROOT, ".monica/config.json"), "r").read())
except:
+ print "no config file found"
+ flag = False
configure()
config = json.loads(open(os.path.join(_ROOT, ".monica/config.json"), "r").read())
diff --git a/monica/monica.py b/monica/monica.py
index <HASH>..<HASH> 100644
--- a/monica/monica.py
+++ b/monica/monica.py
@@ -22,16 +22,15 @@ import requests
from docopt import docopt
import json
from config import configure
-
try:
from config import config
except:
print 'No Configuration File Found'
-
+from config import flag
from tabulate import tabulate
import random
-__version__ = '0.0.8'
+__version__ = '0.0.9'
headers = {'Accept' : 'application/json', 'user_key': config['api_key'], 'User-Agent': 'curl/7.35.0'}
@@ -216,7 +215,7 @@ def main():
'''monica helps you order food from the timeline'''
arguments = docopt(__doc__, version=__version__)
- if arguments['configure']:
+ if arguments['configure'] and flag:
configure()
if arguments['cuisine']:
if arguments['list']:
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -10,7 +10,7 @@ if os.environ.get('USER', '') == 'vagrant':
setup(
name='monica',
- version='0.0.8',
+ version='0.0.9',
description='monica is a command line chef that brings you tasty food',
long_description=open('README.rst').read(),
author='Zephrys',
|
moved to version <I>., fixed the double configure bug
|
Zephrys_monica
|
train
|
968220711d7592cd6712712c75ca7cdd94ed79a3
|
diff --git a/job/client/src/main/java/alluxio/job/plan/replicate/DefaultReplicationHandler.java b/job/client/src/main/java/alluxio/job/plan/replicate/DefaultReplicationHandler.java
index <HASH>..<HASH> 100644
--- a/job/client/src/main/java/alluxio/job/plan/replicate/DefaultReplicationHandler.java
+++ b/job/client/src/main/java/alluxio/job/plan/replicate/DefaultReplicationHandler.java
@@ -50,15 +50,21 @@ public final class DefaultReplicationHandler implements ReplicationHandler {
} catch (NotFoundException e) {
// if the job status doesn't exist, assume the job has failed
return Status.FAILED;
+ } finally {
+ mJobMasterClientPool.release(client);
}
}
@Override
public List<Long> findJobs(String jobName, Set<Status> status) throws IOException {
final JobMasterClient client = mJobMasterClientPool.acquire();
- return client.list(ListAllPOptions.newBuilder().setName(jobName)
- .addAllStatus(status.stream().map(Status::toProto).collect(Collectors.toSet()))
- .build());
+ try {
+ return client.list(ListAllPOptions.newBuilder().setName(jobName)
+ .addAllStatus(status.stream().map(Status::toProto).collect(Collectors.toSet()))
+ .build());
+ } finally {
+ mJobMasterClientPool.release(client);
+ }
}
@Override
|
Fix client pool leak
### What changes are proposed in this pull request?
Release client pool resources
### Why are the changes needed?
Address a client leak
### Does this PR introduce any user facing changes?
None
pr-link: Alluxio/alluxio#<I>
change-id: cid-a2db9d<I>ec5da4b5b<I>c5d<I>fa0b6b0d<I>bd
|
Alluxio_alluxio
|
train
|
2456a850747348cf543383e618b1530ffb473608
|
diff --git a/src/Lib/Twig/Node/Element.php b/src/Lib/Twig/Node/Element.php
index <HASH>..<HASH> 100644
--- a/src/Lib/Twig/Node/Element.php
+++ b/src/Lib/Twig/Node/Element.php
@@ -63,7 +63,7 @@ class Element extends \Twig_Node
$compiler->subcompile($data);
}
$options = $this->getNode('options');
- if ($data !== null) {
+ if ($options !== null) {
$compiler->raw(',');
$compiler->subcompile($options);
}
|
subcompile() expects \Twig_NodeInterface
|
WyriHaximus_TwigView
|
train
|
b08b59050d0a788df864b12f12678e56dafca02f
|
diff --git a/engine-rest/engine-rest/src/main/java/org/camunda/bpm/engine/rest/MetricsRestService.java b/engine-rest/engine-rest/src/main/java/org/camunda/bpm/engine/rest/MetricsRestService.java
index <HASH>..<HASH> 100644
--- a/engine-rest/engine-rest/src/main/java/org/camunda/bpm/engine/rest/MetricsRestService.java
+++ b/engine-rest/engine-rest/src/main/java/org/camunda/bpm/engine/rest/MetricsRestService.java
@@ -1,5 +1,5 @@
/*
- * Copyright © 2013-2018 camunda services GmbH and various authors (info@camunda.com)
+ * Copyright © 2013-2019 camunda services GmbH and various authors (info@camunda.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
|
chore(rest/engine): adjust year in license header
Related to CAM-<I>
|
camunda_camunda-bpm-platform
|
train
|
14e96cd1f915719c16c489ea10a7f68296570632
|
diff --git a/src/ScrollController.js b/src/ScrollController.js
index <HASH>..<HASH> 100644
--- a/src/ScrollController.js
+++ b/src/ScrollController.js
@@ -1741,6 +1741,14 @@ define(function(require, exports, module) {
if (this.options.paginationMode === PaginationMode.PAGE) {
scrollStart = scrollOffset - this.options.extraBoundsSpace[0];
scrollEnd = scrollOffset + size[this._direction] + this.options.extraBoundsSpace[1];
+ if ((scrollOffset + size[this._direction]) < 0) {
+ scrollStart += size[this._direction];
+ scrollEnd += size[this._direction];
+ }
+ else if ((scrollOffset - size[this._direction]) > 0) {
+ scrollStart -= size[this._direction];
+ scrollEnd -= size[this._direction];
+ }
}
if (this.options.layoutAll) {
scrollStart = -1000000;
@@ -1774,7 +1782,7 @@ define(function(require, exports, module) {
this._postLayout(size, scrollOffset);
}
- /*if (this.options.paginationMode === PaginationMode.PAGE) {
+ if (this.options.paginationMode === PaginationMode.PAGE) {
var node = this._nodes._first;
while (node) {
if (!node._invalidated && !node._removing) {
@@ -1785,7 +1793,7 @@ define(function(require, exports, module) {
if (this._nodes._contextState.addCount) {
console.log('adding nodes: ' + this._nodes._contextState.addCount);
}
- }*/
+ }
// Mark non-invalidated nodes for removal
this._nodes.removeNonInvalidatedNodes(this.options.flowOptions.removeSpec);
|
Fixed issue with optimized pagination which caused fast swipe gestures to not work correctly.
|
IjzerenHein_famous-flex
|
train
|
c59679aaaab8be78755373f50aaac81de3b6040b
|
diff --git a/Entity/Xml2arrayFunctions.php b/Entity/Xml2arrayFunctions.php
index <HASH>..<HASH> 100644
--- a/Entity/Xml2arrayFunctions.php
+++ b/Entity/Xml2arrayFunctions.php
@@ -16,7 +16,6 @@ use FabienCrassat\CurriculumVitaeBundle\Utility\AgeCalculator;
class Xml2arrayFunctions {
private $arXML;
private $attr;
- private $key;
private $language;
private $CVFile;
|
[Unused Code][Minor] The property $key is not used and could be removed.
|
fabiencrassat_CurriculumVitaeBundle
|
train
|
be4d2ec296ed5bd8c45b3f6508062b7d269f9f07
|
diff --git a/lib/svtplay_dl/fetcher/dash.py b/lib/svtplay_dl/fetcher/dash.py
index <HASH>..<HASH> 100644
--- a/lib/svtplay_dl/fetcher/dash.py
+++ b/lib/svtplay_dl/fetcher/dash.py
@@ -132,7 +132,7 @@ def adaptionset(attributes, elements, url, baseurl=None):
codecs = None
if "codecs" in element.attrib:
codecs = element.attrib["codecs"]
- lang = None
+ lang = ""
if "lang" in element.attrib:
lang = element.attrib["lang"]
if role_elemets:
|
dash: set language as empty string as default
|
spaam_svtplay-dl
|
train
|
98efcb822ee085d24c543130852893d3a5801913
|
diff --git a/azurerm/config.go b/azurerm/config.go
index <HASH>..<HASH> 100644
--- a/azurerm/config.go
+++ b/azurerm/config.go
@@ -12,7 +12,6 @@ import (
resourcesprofile "github.com/Azure/azure-sdk-for-go/profiles/2017-03-09/resources/mgmt/resources"
appinsights "github.com/Azure/azure-sdk-for-go/services/appinsights/mgmt/2015-05-01/insights"
"github.com/Azure/azure-sdk-for-go/services/batch/mgmt/2018-12-01/batch"
- cdnSvc "github.com/Azure/azure-sdk-for-go/services/cdn/mgmt/2017-10-12/cdn"
cognitiveSvc "github.com/Azure/azure-sdk-for-go/services/cognitiveservices/mgmt/2017-04-18/cognitiveservices"
"github.com/Azure/azure-sdk-for-go/services/compute/mgmt/2018-06-01/compute"
"github.com/Azure/azure-sdk-for-go/services/containerinstance/mgmt/2018-10-01/containerinstance"
@@ -393,10 +392,11 @@ func getArmClient(c *authentication.Config, skipProviderRegistration bool, partn
client.apimgmt = apimgmt.BuildClients(endpoint, c.SubscriptionID, partnerId, auth)
client.automation = automation.BuildClients(endpoint, c.SubscriptionID, partnerId, auth)
+ client.cdn = cdn.BuildClients(endpoint, c.SubscriptionID, partnerId, auth)
+
client.registerAppInsightsClients(endpoint, c.SubscriptionID, auth)
client.registerAuthentication(endpoint, graphEndpoint, c.SubscriptionID, c.TenantID, auth, graphAuth)
client.registerBatchClients(endpoint, c.SubscriptionID, auth)
- client.registerCDNClients(endpoint, c.SubscriptionID, auth)
client.registerCognitiveServiceClients(endpoint, c.SubscriptionID, auth)
client.registerComputeClients(endpoint, c.SubscriptionID, auth)
client.registerContainerClients(endpoint, c.SubscriptionID, auth)
@@ -486,23 +486,6 @@ func (c *ArmClient) registerBatchClients(endpoint, subscriptionId string, auth a
c.batchPoolClient = batchPool
}
-func (c *ArmClient) registerCDNClients(endpoint, subscriptionId string, auth autorest.Authorizer) {
- customDomainsClient := cdnSvc.NewCustomDomainsClientWithBaseURI(endpoint, subscriptionId)
- c.configureClient(&customDomainsClient.Client, auth)
-
- endpointsClient := cdnSvc.NewEndpointsClientWithBaseURI(endpoint, subscriptionId)
- c.configureClient(&endpointsClient.Client, auth)
-
- profilesClient := cdnSvc.NewProfilesClientWithBaseURI(endpoint, subscriptionId)
- c.configureClient(&profilesClient.Client, auth)
-
- c.cdn = &cdn.Client{
- CustomDomainsClient: customDomainsClient,
- EndpointsClient: endpointsClient,
- ProfilesClient: profilesClient,
- }
-}
-
func (c *ArmClient) registerCognitiveServiceClients(endpoint, subscriptionId string, auth autorest.Authorizer) {
accountsClient := cognitiveSvc.NewAccountsClientWithBaseURI(endpoint, subscriptionId)
c.configureClient(&accountsClient.Client, auth)
diff --git a/azurerm/internal/services/cdn/client.go b/azurerm/internal/services/cdn/client.go
index <HASH>..<HASH> 100644
--- a/azurerm/internal/services/cdn/client.go
+++ b/azurerm/internal/services/cdn/client.go
@@ -2,6 +2,8 @@ package cdn
import (
"github.com/Azure/azure-sdk-for-go/services/cdn/mgmt/2017-10-12/cdn"
+ "github.com/Azure/go-autorest/autorest"
+ "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/ar"
)
type Client struct {
@@ -9,3 +11,18 @@ type Client struct {
EndpointsClient cdn.EndpointsClient
ProfilesClient cdn.ProfilesClient
}
+
+func BuildClients(endpoint, subscriptionId, partnerId string, auth autorest.Authorizer) *Client {
+ c := Client{}
+
+ c.CustomDomainsClient = cdn.NewCustomDomainsClientWithBaseURI(endpoint, subscriptionId)
+ ar.ConfigureClient(&c.CustomDomainsClient.Client, auth, partnerId)
+
+ c.EndpointsClient = cdn.NewEndpointsClientWithBaseURI(endpoint, subscriptionId)
+ ar.ConfigureClient(&c.EndpointsClient.Client, auth, partnerId)
+
+ c.ProfilesClient = cdn.NewProfilesClientWithBaseURI(endpoint, subscriptionId)
+ ar.ConfigureClient(&c.ProfilesClient.Client, auth, partnerId)
+
+ return &c
+}
|
Client Registration Refactor: cdn
|
terraform-providers_terraform-provider-azurerm
|
train
|
db1678ebcd21e4e3af4e73d2a0d8115d3dba9cad
|
diff --git a/py/selenium/webdriver/firefox/firefox_profile.py b/py/selenium/webdriver/firefox/firefox_profile.py
index <HASH>..<HASH> 100644
--- a/py/selenium/webdriver/firefox/firefox_profile.py
+++ b/py/selenium/webdriver/firefox/firefox_profile.py
@@ -90,10 +90,7 @@ class FirefoxProfile(object):
if self.profile_dir is None:
self.profile_dir = self._create_tempfolder()
else:
- newprof = os.path.join(
- tempfile.gettempdir(), "webdriver-py-profilecopy")
- if os.path.exists(newprof):
- shutil.rmtree(newprof)
+ newprof = os.path.join(tempfile.mkdtemp(), "webdriver-py-profilecopy")
shutil.copytree(self.profile_dir, newprof)
self.profile_dir = newprof
self._read_existing_userjs()
|
EranMes, on behalf of KevinCooney: Avoid a race condition during the creation of a temporary directory for a profile.
r<I>
|
SeleniumHQ_selenium
|
train
|
1f43fd43d6a6c68f9bb371a47210c3344bc3aa78
|
diff --git a/lib/oauth/request_proxy/action_controller_request.rb b/lib/oauth/request_proxy/action_controller_request.rb
index <HASH>..<HASH> 100644
--- a/lib/oauth/request_proxy/action_controller_request.rb
+++ b/lib/oauth/request_proxy/action_controller_request.rb
@@ -1,36 +1,15 @@
# frozen_string_literal: true
require "active_support"
-require "active_support/version"
require "action_controller"
require "uri"
-if Gem::Version.new(ActiveSupport::VERSION::STRING) < Gem::Version.new("3")
- # rails 2.x
- require "action_controller/request"
- unless ActionController::Request::HTTP_METHODS.include?("patch")
- ActionController::Request::HTTP_METHODS << "patch"
- ActionController::Request::HTTP_METHOD_LOOKUP["PATCH"] = :patch
- ActionController::Request::HTTP_METHOD_LOOKUP["patch"] = :patch
- end
-
-elsif Gem::Version.new(ActiveSupport::VERSION::STRING) < Gem::Version.new("4")
- # rails 3.x
- require "action_dispatch/http/request"
- unless ActionDispatch::Request::HTTP_METHODS.include?("patch")
- ActionDispatch::Request::HTTP_METHODS << "patch"
- ActionDispatch::Request::HTTP_METHOD_LOOKUP["PATCH"] = :patch
- ActionDispatch::Request::HTTP_METHOD_LOOKUP["patch"] = :patch
- end
-
-else # rails 4.x and later - already has patch
- require "action_dispatch/http/request"
-end
+require "action_dispatch/http/request"
module OAuth
module RequestProxy
class ActionControllerRequest < OAuth::RequestProxy::Base
- proxies(defined?(::ActionDispatch::AbstractRequest) ? ::ActionDispatch::AbstractRequest : ::ActionDispatch::Request)
+ proxies(::ActionDispatch::Request)
def method
request.method.to_s.upcase
@@ -50,7 +29,7 @@ module OAuth
end
end
- # Override from OAuth::RequestProxy::Base to avoid roundtrip
+ # Override from OAuth::RequestProxy::Base to avoid round-trip
# conversion to Hash or Array and thus preserve the original
# parameter names
def parameters_for_signature
diff --git a/lib/oauth/version.rb b/lib/oauth/version.rb
index <HASH>..<HASH> 100644
--- a/lib/oauth/version.rb
+++ b/lib/oauth/version.rb
@@ -2,6 +2,6 @@
module OAuth
module Version
- VERSION = "0.6.1"
+ VERSION = "1.0.0"
end
end
|
🔥 Remove hacks for old Rails
|
oauth-xx_oauth-ruby
|
train
|
40c81f8fc5752c7d853b46a0e7f776c5935f226c
|
diff --git a/pyscreeze/__init__.py b/pyscreeze/__init__.py
index <HASH>..<HASH> 100644
--- a/pyscreeze/__init__.py
+++ b/pyscreeze/__init__.py
@@ -1,16 +1,14 @@
# PyScreeze
-# by Al Sweigart
-# https://github.com/asweigart/pyscreeze
-# BSD license
"""
-So, apparently Pillow support on Ubuntu 64-bit has several additional steps since it doesn't have JPEG/PNG support out of the box. Description here:
+NOTE:
+Apparently Pillow support on Ubuntu 64-bit has several additional steps since it doesn't have JPEG/PNG support out of the box. Description here:
https://stackoverflow.com/questions/7648200/pip-install-pil-e-tickets-1-no-jpeg-png-support
http://ubuntuforums.org/showthread.php?t=1751455
"""
-__version__ = '0.1.22'
+__version__ = '0.1.24'
import collections
import datetime
@@ -23,7 +21,7 @@ try:
from PIL import Image
from PIL import ImageOps
except ImportError:
- pass
+ pass # TODO - This is not good. Update this. Why am I just ignoring this?
from contextlib import contextmanager
try:
@@ -43,6 +41,18 @@ if useOpenCV:
LOAD_GRAYSCALE = cv2.IMREAD_GRAYSCALE
+import sys
+if sys.platform == 'win32':
+ # On Windows, the monitor scaling can be set to something besides normal 100%.
+ # PyScreeze and Pillow needs to account for this to make accurate screenshots.
+ # TODO - How does macOS and Linux handle monitor scaling?
+ import ctypes
+ try:
+ ctypes.windll.user32.SetProcessDPIAware()
+ except AttributeError:
+ pass # Windows XP doesn't support monitor scaling, so just do nothing.
+
+
GRAYSCALE_DEFAULT = False
# For version 0.1.19 I changed it so that ImageNotFoundException was raised
@@ -488,7 +498,7 @@ elif sys.platform == 'win32':
try:
from PIL import ImageGrab
except ImportError:
- pass
+ pass # TODO This is bad. Why do I just ignore this?
else:
screenshot = _screenshot_linux
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -22,24 +22,23 @@ setup(
packages=['pyscreeze'],
test_suite='tests',
install_requires=['Pillow'],
+ requires_python='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*', # Copied from the Pillow library, since PyScreeze is built on top of it.
keywords="screenshot screen screencap capture scrot screencapture image",
classifiers=[
- 'Development Status :: 3 - Alpha',
+ 'Development Status :: 4 - Beta',
'Environment :: Win32 (MS Windows)',
'Environment :: X11 Applications',
'Environment :: MacOS X',
'Intended Audience :: Developers',
- 'License :: OSI Approved :: BSD License',
+ 'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
+ # Copied from the Pillow library, since PyScreeze is built on top of it:
'Programming Language :: Python :: 2',
- 'Programming Language :: Python :: 2.5',
- 'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
- 'Programming Language :: Python :: 3.1',
- 'Programming Language :: Python :: 3.2',
- 'Programming Language :: Python :: 3.3',
- 'Programming Language :: Python :: 3.4',
+ 'Programming Language :: Python :: 3.5',
+ 'Programming Language :: Python :: 3.6',
+ 'Programming Language :: Python :: 3.7',
],
)
\ No newline at end of file
|
Fixed monitor DPI scaling issue on Windows.
|
asweigart_pyscreeze
|
train
|
4e82aaff878ee4c15a6b5f6693213a9476ba5d91
|
diff --git a/lib/url.js b/lib/url.js
index <HASH>..<HASH> 100644
--- a/lib/url.js
+++ b/lib/url.js
@@ -68,7 +68,7 @@ function parse(uri){
if (!obj.hostname && obj.host) {
var pieces = obj.host.split(':');
obj.hostname = pieces.shift();
- obj.port = pieces.pop();
+ if (pieces.length) obj.port = pieces.pop();
}
// make sure we treat `localhost:80` and `localhost` equally
|
url: allow `port` in combination with `host`
|
tsjing_socket.io-client
|
train
|
b68e39111b4f7ac262e0d0f11cf20a3dcaf22f17
|
diff --git a/HistogramPanel.py b/HistogramPanel.py
index <HASH>..<HASH> 100644
--- a/HistogramPanel.py
+++ b/HistogramPanel.py
@@ -112,10 +112,10 @@ class AdornmentsCanvasItem(CanvasItem.AbstractCanvasItem):
drawing_context.restore()
-class LinePlotCanvasItem(CanvasItem.AbstractCanvasItem):
+class SimpleLineGraphCanvasItem(CanvasItem.AbstractCanvasItem):
def __init__(self):
- super(LinePlotCanvasItem, self).__init__()
+ super(SimpleLineGraphCanvasItem, self).__init__()
self.data = None
def repaint(self, drawing_context):
@@ -153,9 +153,9 @@ class HistogramCanvasItem(CanvasItem.CanvasItemComposition):
super(HistogramCanvasItem, self).__init__()
self.document_controller = document_controller
self.adornments_canvas_item = AdornmentsCanvasItem()
- self.line_plot_canvas_item = LinePlotCanvasItem()
+ self.simple_line_graph_canvas_item = SimpleLineGraphCanvasItem()
# canvas items get added back to front
- self.add_canvas_item(self.line_plot_canvas_item)
+ self.add_canvas_item(self.simple_line_graph_canvas_item)
self.add_canvas_item(self.adornments_canvas_item)
self.__data_item = None
self.__pressed = False
@@ -172,6 +172,7 @@ class HistogramCanvasItem(CanvasItem.CanvasItemComposition):
self.selected_data_item_changed(None, set([DataItem.SOURCE]))
# disconnect self as listener
self.document_controller.remove_listener(self)
+ super(HistogramCanvasItem, self).close()
# _get_data_item is only used for testing
def _get_data_item(self):
@@ -186,8 +187,8 @@ class HistogramCanvasItem(CanvasItem.CanvasItemComposition):
if not self.__pressed:
self.adornments_canvas_item.display_limits = (0, 1)
histogram_data = self.__data_item.get_histogram_data() if self.__data_item else None
- self.line_plot_canvas_item.data = histogram_data
- self.line_plot_canvas_item.update()
+ self.simple_line_graph_canvas_item.data = histogram_data
+ self.simple_line_graph_canvas_item.update()
self.adornments_canvas_item.update()
self.repaint_if_needed()
|
Rename line plot canvas item to simple line graph canvas item.
svn r<I>
|
nion-software_nionswift
|
train
|
0059f1698fcb1ba97815cd4fe806514ff749ad41
|
diff --git a/oci/oci.go b/oci/oci.go
index <HASH>..<HASH> 100644
--- a/oci/oci.go
+++ b/oci/oci.go
@@ -55,7 +55,7 @@ const (
// minCtrStopTimeout is the minimal amout of time in seconds to wait
// before issuing a timeout regarding the proper termination of the
// container.
- minCtrStopTimeout = 10
+ minCtrStopTimeout = 30
// UntrustedRuntime is the implicit runtime handler name used to
// fallback to the untrusted runtime.
|
oci: Extend container stop timeout
With the recent introduction of the parallelization of multiple
containers stop, it might take more than <I> seconds to stop a
container running with Kata Containers, given the fact that the
CI runs in a nested environment.
This patch extends the minimum timeout to a larger value of <I>
seconds to ensure the CI will not run into this issue anymore.
Fixes #<I>
|
cri-o_cri-o
|
train
|
6bae86532f9baac87769b4622b4f9e1586069691
|
diff --git a/lib/twostroke/tokens.rb b/lib/twostroke/tokens.rb
index <HASH>..<HASH> 100644
--- a/lib/twostroke/tokens.rb
+++ b/lib/twostroke/tokens.rb
@@ -7,7 +7,7 @@ module Twostroke
[ :SINGLE_COMMENT, /\/\/.*?$/ ],
[ :WHITESPACE, /\s+/ ],
- [ :NUMBER, /(\d+(\.?\d*([eE][+-]?\d+)?)?|\.\d+([eE][+-]?\d+)?)/, ->m { m[0].to_f } ],
+ [ :NUMBER, /((?<oct>0[0-7]+)|(?<hex>0x[A-Fa-f0-9]+)|(?<to_f>(\d+(\.?\d*([eE][+-]?\d+)?)?|\.\d+([eE][+-]?\d+)?)))/, ->m { m[0].send m.names.first } ],
*RESERVED.map do |w|
[ w.upcase.intern, /#{w}(?=[^a-zA-Z_0-9])/ ]
|
hex and octal numbers
|
charliesome_twostroke
|
train
|
5974de4ba2a2d48adf395d92be69750db5eefe86
|
diff --git a/tests/testprofiles/app.py b/tests/testprofiles/app.py
index <HASH>..<HASH> 100644
--- a/tests/testprofiles/app.py
+++ b/tests/testprofiles/app.py
@@ -4,3 +4,4 @@ from django.apps import AppConfig
class TestProfilesConfig(AppConfig):
name = 'testprofiles'
verbose_name = 'Test profiles'
+ default_auto_field = 'django.db.models.AutoField'
|
Add default_auto_field setting, strongly recommended in Django <I> (#<I>)
|
knaperek_djangosaml2
|
train
|
78a241aea1534560e6dc1d94d4ffe9ab29ec43ed
|
diff --git a/jre_emul/android/platform/external/icu/android_icu4j/src/main/java/android/icu/impl/UCharacterProperty.java b/jre_emul/android/platform/external/icu/android_icu4j/src/main/java/android/icu/impl/UCharacterProperty.java
index <HASH>..<HASH> 100644
--- a/jre_emul/android/platform/external/icu/android_icu4j/src/main/java/android/icu/impl/UCharacterProperty.java
+++ b/jre_emul/android/platform/external/icu/android_icu4j/src/main/java/android/icu/impl/UCharacterProperty.java
@@ -509,61 +509,6 @@ public final class UCharacterProperty
}
}
- /*
- * J2ObjC: the following 3 classes were created from anonymous classes in order to
- * annotate them with @WeakOuter.
- */
- @WeakOuter
- private class IntProperty_GeneralCategory extends IntProperty {
- IntProperty_GeneralCategory() {
- super(SRC_CHAR);
- }
- @Override
- int getValue(int c) {
- return getType(c);
- }
- @Override
- int getMaxValue(int which) {
- return UCharacterCategory.CHAR_CATEGORY_COUNT-1;
- }
- }
-
- @WeakOuter
- private class IntProperty_NumericType extends IntProperty {
- IntProperty_NumericType() {
- super(SRC_CHAR);
- }
- @Override
- int getValue(int c) {
- return ntvGetType(getNumericTypeValue(getProperty(c)));
- }
- @Override
- int getMaxValue(int which) {
- return NumericType.COUNT-1;
- }
- }
-
- @WeakOuter
- private class IntProperty_HangulSyllableType extends IntProperty {
- IntProperty_HangulSyllableType() {
- super(SRC_PROPSVEC);
- }
- @Override
- int getValue(int c) {
- /* see comments on gcbToHst[] above */
- int gcb=(getAdditional(c, 2)&GCB_MASK)>>>GCB_SHIFT;
- if(gcb<gcbToHst.length) {
- return gcbToHst[gcb];
- } else {
- return HangulSyllableType.NOT_APPLICABLE;
- }
- }
- @Override
- int getMaxValue(int which) {
- return HangulSyllableType.COUNT-1;
- }
- }
-
IntProperty intProps[]={
new BiDiIntProperty() { // BIDI_CLASS
@Override
@@ -580,7 +525,16 @@ public final class UCharacterProperty
},
new IntProperty(2, DECOMPOSITION_TYPE_MASK_, 0),
new IntProperty(0, EAST_ASIAN_MASK_, EAST_ASIAN_SHIFT_),
- new IntProperty_GeneralCategory(), // GENERAL_CATEGORY
+ new @WeakOuter IntProperty(SRC_CHAR) { // GENERAL_CATEGORY
+ @Override
+ int getValue(int c) {
+ return getType(c);
+ }
+ @Override
+ int getMaxValue(int which) {
+ return UCharacterCategory.CHAR_CATEGORY_COUNT-1;
+ }
+ },
new BiDiIntProperty() { // JOINING_GROUP
@Override
int getValue(int c) {
@@ -594,14 +548,38 @@ public final class UCharacterProperty
}
},
new IntProperty(2, LB_MASK, LB_SHIFT), // LINE_BREAK
- new IntProperty_NumericType(), // NUMERIC_TYPE
+ new @WeakOuter IntProperty(SRC_CHAR) { // NUMERIC_TYPE
+ @Override
+ int getValue(int c) {
+ return ntvGetType(getNumericTypeValue(getProperty(c)));
+ }
+ @Override
+ int getMaxValue(int which) {
+ return NumericType.COUNT-1;
+ }
+ },
new IntProperty(0, SCRIPT_MASK_, 0) {
@Override
int getValue(int c) {
return UScript.getScript(c);
}
},
- new IntProperty_HangulSyllableType(), // HANGUL_SYLLABLE_TYPE
+ new @WeakOuter IntProperty(SRC_PROPSVEC) { // HANGUL_SYLLABLE_TYPE
+ @Override
+ int getValue(int c) {
+ /* see comments on gcbToHst[] above */
+ int gcb=(getAdditional(c, 2)&GCB_MASK)>>>GCB_SHIFT;
+ if(gcb<gcbToHst.length) {
+ return gcbToHst[gcb];
+ } else {
+ return HangulSyllableType.NOT_APPLICABLE;
+ }
+ }
+ @Override
+ int getMaxValue(int which) {
+ return HangulSyllableType.COUNT-1;
+ }
+ },
// max=1=YES -- these are never "maybe", only "no" or "yes"
new NormQuickCheckIntProperty(SRC_NFC, UProperty.NFD_QUICK_CHECK, 1),
new NormQuickCheckIntProperty(SRC_NFKC, UProperty.NFKD_QUICK_CHECK, 1),
|
Project health: use WeakOuter as a type annotation in UCharacterProperty.
|
google_j2objc
|
train
|
0c98afb66fe020cdcb65df97d412cdbaacbbdd58
|
diff --git a/mode/clike/clike.js b/mode/clike/clike.js
index <HASH>..<HASH> 100644
--- a/mode/clike/clike.js
+++ b/mode/clike/clike.js
@@ -123,7 +123,7 @@ CodeMirror.defineMode("clike", function(config, parserConfig) {
if (style == "comment" || style == "meta") return style;
if (ctx.align == null) ctx.align = true;
- if ((curPunc == ";" || curPunc == ":") && ctx.type == "statement") popContext(state);
+ if ((curPunc == ";" || curPunc == ":" || curPunc == ",") && ctx.type == "statement") popContext(state);
else if (curPunc == "{") pushContext(state, stream.column(), "}");
else if (curPunc == "[") pushContext(state, stream.column(), "]");
else if (curPunc == "(") pushContext(state, stream.column(), ")");
|
fix extra indentation in array initialization
array initialization (one entry per line) results in extra indentation from second element onwards:
ex:-
const string[] fields = {
"one",
"two",
"three"
};
|
codemirror_CodeMirror
|
train
|
26320feb65ce5053f681299ecda1e86453b91903
|
diff --git a/Gemfile.lock b/Gemfile.lock
index <HASH>..<HASH> 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -1,7 +1,7 @@
PATH
remote: .
specs:
- ricordami (0.0.3)
+ ricordami (0.0.4)
activemodel (~> 3.0.0)
activesupport (~> 3.0.0)
redis (~> 2.1.1)
diff --git a/lib/ricordami/has_attributes.rb b/lib/ricordami/has_attributes.rb
index <HASH>..<HASH> 100644
--- a/lib/ricordami/has_attributes.rb
+++ b/lib/ricordami/has_attributes.rb
@@ -36,6 +36,7 @@ module Ricordami
attr_reader :attributes
def initialize(attrs = {})
+ attrs = {} if attrs.nil?
@attributes = {}.with_indifferent_access
@reloading = false
update_mem_attributes(attrs) unless attrs.empty?
diff --git a/lib/ricordami/version.rb b/lib/ricordami/version.rb
index <HASH>..<HASH> 100644
--- a/lib/ricordami/version.rb
+++ b/lib/ricordami/version.rb
@@ -1,3 +1,3 @@
module Ricordami
- VERSION = "0.0.3"
+ VERSION = "0.0.4"
end
diff --git a/spec/ricordami/has_attributes_spec.rb b/spec/ricordami/has_attributes_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/ricordami/has_attributes_spec.rb
+++ b/spec/ricordami/has_attributes_spec.rb
@@ -38,6 +38,12 @@ describe Ricordami::HasAttributes do
attribute.initial_value.should == "2"
attribute.initial_value.should == "3"
end
+
+ it "can create a new instance without parameters, with nil or an empty hash" do
+ Boat.new.attributes.should == {"id" => nil}
+ Boat.new(nil).attributes.should == {"id" => nil}
+ Boat.new({}).attributes.should == {"id" => nil}
+ end
end
describe "an instance" do
diff --git a/spec/ricordami/model_spec.rb b/spec/ricordami/model_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/ricordami/model_spec.rb
+++ b/spec/ricordami/model_spec.rb
@@ -28,9 +28,6 @@ describe Ricordami::Model do
user.to_model.should == user
end
- it "has a simple to_s method" do
- end
-
describe "#to_key" do
before(:each) do
@user = User.new
|
allow to pass nil when creating a new instance
|
mathieul_ricordami
|
train
|
ec4b7c635fb59007eaa6aea5a4e267f05e6d4c2e
|
diff --git a/src/js/panels-viewer.js b/src/js/panels-viewer.js
index <HASH>..<HASH> 100644
--- a/src/js/panels-viewer.js
+++ b/src/js/panels-viewer.js
@@ -42,12 +42,6 @@ var panelsViewer = {
// get the base panels
var panels = Panels.get();
- // figure out if lineage should be drawn
- patternData.lineageExists = (patternData.lineage.length !== 0);
-
- // figure out if reverse lineage should be drawn
- patternData.lineageRExists = (patternData.lineageR.length !== 0);
-
// evaluate panels array and create content
for (var i = 0; i < panels.length; ++i) {
@@ -97,6 +91,12 @@ var panelsViewer = {
var patternPartial = patternData.patternPartial;
patternData.panels = panels;
+ // add *Exists attributes for Hogan templates
+ patternData = this.setExists(patternData);
+
+ // set isPatternView based on if we have to pass it back to the styleguide level
+ patternData.isPatternView = (iframePassback === false);
+
// render all of the panels in the base panel template
template = document.getElementById('pl-panel-template-base');
templateCompiled = Hogan.compile(template.innerHTML);
@@ -159,6 +159,30 @@ var panelsViewer = {
},
/**
+ * set the various *Exists needed for the template view
+ */
+ setExists: function(pD) {
+
+ // figure out if the description exists
+ pD.patternDescExists = ((pD.patternDesc !== '') && ((pD.patternDescAdditions !== undefined) && (pD.patternDescAdditions.length > 0)));
+
+ // figure out if lineage should be drawn
+ pD.lineageExists = (pD.lineage.length !== 0);
+
+ // figure out if reverse lineage should be drawn
+ pD.lineageRExists = (pD.lineageR.length !== 0);
+
+ // figure out if pattern state should be drawn
+ pD.patternStateExists = (pD.patternState !== undefined);
+
+ // figure if the entire desc block should be drawn
+ pD.descBlockExists = (pD.patternDescExists || pD.lineageExists || pD.lineageRExists || pD.patternStateExists);
+
+ return pD;
+
+ },
+
+ /**
* clear any selection of code when swapping tabs or opening a new pattern
*/
clear: function() {
|
added method to add *Exists properties
|
pattern-lab_styleguidekit-assets-default
|
train
|
d5924ef0b4545084d26bff4287976816adb9b944
|
diff --git a/buffer.go b/buffer.go
index <HASH>..<HASH> 100644
--- a/buffer.go
+++ b/buffer.go
@@ -16,23 +16,30 @@ type bufferedReader struct {
bufferRead int
bufferSize int
sniffing bool
+ lastErr error
}
func (s *bufferedReader) Read(p []byte) (int, error) {
- // Functionality of bytes.Reader.
- bn := copy(p, s.buffer.Bytes()[s.bufferRead:s.bufferSize])
- s.bufferRead += bn
-
- p = p[bn:]
+ if s.bufferSize > s.bufferRead {
+ // If we have already read something from the buffer before, we return the
+ // same data and the last error if any. We need to immediately return,
+ // otherwise we may block for ever, if we try to be smart and call
+ // source.Read() seeking a little bit of more data.
+ bn := copy(p, s.buffer.Bytes()[s.bufferRead:s.bufferSize])
+ s.bufferRead += bn
+ return bn, s.lastErr
+ }
- // Funtionality of io.TeeReader.
+ // If there is nothing more to return in the sniffed buffer, read from the
+ // source.
sn, sErr := s.source.Read(p)
if sn > 0 && s.sniffing {
+ s.lastErr = sErr
if wn, wErr := s.buffer.Write(p[:sn]); wErr != nil {
- return bn + wn, wErr
+ return wn, wErr
}
}
- return bn + sn, sErr
+ return sn, sErr
}
func (s *bufferedReader) reset(snif bool) {
diff --git a/cmux_test.go b/cmux_test.go
index <HASH>..<HASH> 100644
--- a/cmux_test.go
+++ b/cmux_test.go
@@ -279,7 +279,13 @@ func TestHTTP2(t *testing.T) {
t.Fatal(err)
}
var b [len(http2.ClientPreface)]byte
- if _, err := muxedConn.Read(b[:]); err != io.EOF {
+ var n int
+ // We have the sniffed buffer first...
+ if n, err = muxedConn.Read(b[:]); err == io.EOF {
+ t.Fatal(err)
+ }
+ // and then we read from the source.
+ if _, err = muxedConn.Read(b[n:]); err != io.EOF {
t.Fatal(err)
}
if string(b[:]) != http2.ClientPreface {
|
Fix a blocking issue in buffer reader
After sniffing and buffering data, if we try to read from
the socket again, bufio.Reader may block. This breaks HTTP handlers
in go<I>+ if one tries on browsers or with curl. Go's HTTP client,
however, is not broken. This issue is also there with TeeReader.
Return immediately with the data in the sniffed buffer.
|
soheilhy_cmux
|
train
|
b74d7d9262de622bfa38342c34011e81f5ccf48e
|
diff --git a/api-audit/src/main/java/com/capitalone/dashboard/evaluator/PerformanceTestResultEvaluator.java b/api-audit/src/main/java/com/capitalone/dashboard/evaluator/PerformanceTestResultEvaluator.java
index <HASH>..<HASH> 100644
--- a/api-audit/src/main/java/com/capitalone/dashboard/evaluator/PerformanceTestResultEvaluator.java
+++ b/api-audit/src/main/java/com/capitalone/dashboard/evaluator/PerformanceTestResultEvaluator.java
@@ -41,7 +41,7 @@ public class PerformanceTestResultEvaluator extends Evaluator<PerformanceTestAud
@Override
public Collection<PerformanceTestAuditResponse> evaluate(Dashboard dashboard, long beginDate, long endDate, Map<?, ?> dummy) throws AuditException {
- List<CollectorItem> testItems = getCollectorItems(dashboard, "test", CollectorType.Test);
+ List<CollectorItem> testItems = getCollectorItems(dashboard, "codeanalysis", CollectorType.Test);
Collection<TestResultsAuditResponse> responses = new ArrayList<>();
if (CollectionUtils.isEmpty(testItems)) {
throw new AuditException("No tests configured", AuditException.NO_COLLECTOR_ITEM_CONFIGURED);
|
Update PerformanceTestResultEvaluator.java (#<I>)
|
Hygieia_Hygieia
|
train
|
3b83fac2125f2968e4a775b1ff609a6ef87f41fe
|
diff --git a/src/Neuron/Models/Helpers/Errorable.php b/src/Neuron/Models/Helpers/Errorable.php
index <HASH>..<HASH> 100644
--- a/src/Neuron/Models/Helpers/Errorable.php
+++ b/src/Neuron/Models/Helpers/Errorable.php
@@ -32,6 +32,14 @@ abstract class Errorable
}
/**
+ * Set the error array. By reference!
+ * @param array $errors
+ */
+ public function setErrors (array &$errors){
+ $this->errors = $errors;
+ }
+
+ /**
* @return string|null
*/
public function getError ()
|
Adding setERrors to Errorable.
|
CatLabInteractive_Neuron
|
train
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.