hash
stringlengths
40
40
diff
stringlengths
131
114k
message
stringlengths
7
980
project
stringlengths
5
67
split
stringclasses
1 value
92ee7438d5b54f48122540174ef9204db654f7f7
diff --git a/src/TestUtils/CloudFunctionDeploymentTrait.php b/src/TestUtils/CloudFunctionDeploymentTrait.php index <HASH>..<HASH> 100644 --- a/src/TestUtils/CloudFunctionDeploymentTrait.php +++ b/src/TestUtils/CloudFunctionDeploymentTrait.php @@ -23,6 +23,7 @@ use Google\Cloud\TestUtils\GcloudWrapper\CloudFunction; use Google\Cloud\TestUtils\EventuallyConsistentTestTrait; use GuzzleHttp\Client; use GuzzleHttp\HandlerStack; +use PHPUnit\Framework\ExpectationFailedException; /** * Trait CloudFunctionDeploymentTrait.
chore(functions): Fix missing use reference
GoogleCloudPlatform_php-tools
train
3cf6b1d847c1112434a468134c301b89eb5bc473
diff --git a/lib/label/add-label.js b/lib/label/add-label.js index <HASH>..<HASH> 100644 --- a/lib/label/add-label.js +++ b/lib/label/add-label.js @@ -31,7 +31,7 @@ function addLabel(root, node, location) { y = (-labelBBox.height / 2); } labelSvg.attr("transform", - "translate(" + (-labelBBox.width / 2) + "," + y + ")"); + "translate(" + (-labelBBox.x - labelBBox.width / 2) + "," + y + ")"); return labelSvg; }
Fix positioning of 'text-anchor: middle' node labels.
smbolton_dagre-d3v4
train
d9b12055ebbd3f82b890f8df713ad7c0fe1309ba
diff --git a/src/Link.php b/src/Link.php index <HASH>..<HASH> 100644 --- a/src/Link.php +++ b/src/Link.php @@ -7,27 +7,30 @@ use Selene\Matisse\VisualComponent; class LinkAttributes extends VisualComponentAttributes { - public $label; - public $url; - public $disabled = false; - public $tooltip; - public $script; public $action; + public $activeClass = 'active'; + public $disabled = false; + public $label; public $param; + public $script; + public $tooltip; + public $href; - protected function typeof_label () { return AttributeType::TEXT; } + protected function typeof_action () { return AttributeType::ID; } - protected function typeof_url () { return AttributeType::TEXT; } + protected function typeof_active_class () { return AttributeType::TEXT; } protected function typeof_disabled () { return AttributeType::BOOL; } - protected function typeof_tooltip () { return AttributeType::TEXT; } + protected function typeof_label () { return AttributeType::TEXT; } + + protected function typeof_param () { return AttributeType::TEXT; } protected function typeof_script () { return AttributeType::TEXT; } - protected function typeof_action () { return AttributeType::ID; } + protected function typeof_tooltip () { return AttributeType::TEXT; } - protected function typeof_param () { return AttributeType::TEXT; } + protected function typeof_href () { return AttributeType::TEXT; } } class Link extends VisualComponent @@ -54,24 +57,36 @@ class Link extends VisualComponent return new LinkAttributes($this); } + protected function preRender () + { + global $application; + $attr = $this->attrs (); + + if ($application->VURI == $attr->href) + $this->cssClassName = $attr->activeClass; + + parent::preRender(); + } + protected function render () { - $script = $this->attrs ()->action ? "doAction('{$this->attrs()->action}','{$this->attrs()->param}')" - : $this->attrs ()->script; + $attr = $this->attrs (); + + $script = $attr->action ? 
"doAction('{$this->attrs()->action}','{$this->attrs()->param}')" + : $attr->script; - $this->addAttribute ('title', $this->attrs ()->tooltip); - $this->addAttribute ('href', $this->attrs ()->disabled + $this->addAttribute ('title', $attr->tooltip); + $this->addAttribute ('href', $attr->disabled ? '#' : - (isset($this->attrs ()->url) + (isset($attr->href) ? - $this->attrs ()->url + $attr->href : "javascript:$script" ) ); $this->beginContent (); - $this->setContent ($this->attrs ()->label); + $this->setContent ($attr->label); } } -
Added autohilight to link component.
electro-modules_matisse-components
train
b174f98e161e2fbbe85dc29f2f373179c17ae3ad
diff --git a/defaultschema.js b/defaultschema.js index <HASH>..<HASH> 100644 --- a/defaultschema.js +++ b/defaultschema.js @@ -83,12 +83,12 @@ export class HardBreak extends Inline { // ;; The default emphasis mark type. export class EmMark extends MarkType { - static get rank() { return 51 } + static get rank() { return 31 } } // ;; The default strong mark type. export class StrongMark extends MarkType { - static get rank() { return 52 } + static get rank() { return 32 } } // ;; The default link mark type. Has these attributes: @@ -96,7 +96,7 @@ export class StrongMark extends MarkType { // - **`href`** (required): The link target. // - **`title`**: The link's title. export class LinkMark extends MarkType { - static get rank() { return 25 } + static get rank() { return 60 } get attrs() { return { href: new Attribute,
Overhaul algorithm that renders marks in Markdown So that it only 'mixes' opening and closing syntax when allowed. Issue #<I>
ProseMirror_prosemirror-model
train
9a127cd141c1bca9863cd4e9e1963cb6c0088923
diff --git a/test/e2e/logging.test.js b/test/e2e/logging.test.js index <HASH>..<HASH> 100644 --- a/test/e2e/logging.test.js +++ b/test/e2e/logging.test.js @@ -4,6 +4,7 @@ const path = require("path"); const fs = require("graceful-fs"); const webpack = require("webpack"); const Server = require("../../lib/Server"); +const HTMLGeneratorPlugin = require("../helpers/html-generator-plugin"); const config = require("../fixtures/client-config/webpack.config"); const runBrowser = require("../helpers/run-browser"); const port = require("../ports-map").logging; @@ -73,6 +74,7 @@ describe("logging", () => { ); }, }, + new HTMLGeneratorPlugin(), ], }, }, @@ -90,6 +92,7 @@ describe("logging", () => { ); }, }, + new HTMLGeneratorPlugin(), ], }, }, @@ -142,6 +145,7 @@ describe("logging", () => { ); }, }, + new HTMLGeneratorPlugin(), ], }, devServerOptions: { @@ -167,6 +171,7 @@ describe("logging", () => { ); }, }, + new HTMLGeneratorPlugin(), ], }, devServerOptions: { @@ -205,7 +210,7 @@ describe("logging", () => { consoleMessages.push(message); }); - await page.goto(`http://localhost:${port}/main`, { + await page.goto(`http://localhost:${port}/`, { waitUntil: "networkidle0", });
test: migrate logging test on `HTMLGeneratorPlugin` (#<I>)
webpack_webpack-dev-server
train
918cc24937497094b8a0eddf3a3d30f57b7e54aa
diff --git a/test/resources/metrics_test.js b/test/resources/metrics_test.js index <HASH>..<HASH> 100644 --- a/test/resources/metrics_test.js +++ b/test/resources/metrics_test.js @@ -22,9 +22,10 @@ describe('Metrics', function() { }); }); + // 1424377740 it('encodes time ranges', function() { var metrics = new Metrics(helper.client); - var since = new Date(2015, 1, 19, 15, 29, 00, 00); + var since = new Date(2015, 1, 19, 15, 29, 0, 0); return metrics.retrieve({ since: since }).then(function(metrics) { expect(metrics.nps).to.eq(10); diff --git a/test/test_helper.js b/test/test_helper.js index <HASH>..<HASH> 100644 --- a/test/test_helper.js +++ b/test/test_helper.js @@ -32,6 +32,13 @@ var requests = { body: { nps: 0 } }, + // 1424377740 <- Linux + '/metrics?since=1424377740': { + status: 200, + body: { nps: 10 } + }, + + // 1424381340 <- OS X '/metrics?since=1424381340': { status: 200, body: { nps: 10 }
Supply alternate metric timestamp mock url Differences in UTC time between platforms cause tests to fail in CI.
delighted_delighted-node
train
04d64bba940d6e36fb4e09b8c79719f4722a663a
diff --git a/salt/modules/nilrt_ip.py b/salt/modules/nilrt_ip.py index <HASH>..<HASH> 100644 --- a/salt/modules/nilrt_ip.py +++ b/salt/modules/nilrt_ip.py @@ -12,9 +12,9 @@ import configparser import os # Import salt libs -import salt.utils.validate.net import salt.exceptions - +import salt.utils.files +import salt.utils.validate.net # Import 3rd-party libs from salt.ext import six @@ -240,7 +240,7 @@ def _get_static_info(interface): parser = configparser.ConfigParser() if os.path.exists(INTERFACES_CONFIG): try: - with salt.utils.fopen(INTERFACES_CONFIG, 'r') as config_file: + with salt.utils.files.fopen(INTERFACES_CONFIG, 'r') as config_file: parser.read_file(config_file) except configparser.MissingSectionHeaderError: pass @@ -436,7 +436,7 @@ def _configure_static_interface(interface, **settings): parser = configparser.ConfigParser() if os.path.exists(INTERFACES_CONFIG): try: - with salt.utils.fopen(INTERFACES_CONFIG, 'r') as config_file: + with salt.utils.files.fopen(INTERFACES_CONFIG, 'r') as config_file: parser.read_file(config_file) except configparser.MissingSectionHeaderError: pass @@ -455,7 +455,7 @@ def _configure_static_interface(interface, **settings): parser.set('interface_{0}'.format(hwaddr_section_number), 'Name', name) parser.set('interface_{0}'.format(hwaddr_section_number), 'MAC', hwaddr) parser.set('interface_{0}'.format(hwaddr_section_number), 'Type', 'ethernet') - with salt.utils.fopen(INTERFACES_CONFIG, 'w') as config_file: + with salt.utils.files.fopen(INTERFACES_CONFIG, 'w') as config_file: parser.write(config_file) return True
Update old utils paths with new paths
saltstack_salt
train
903997b37ee06fdfbf92c8f13559f3f1fe0e3416
diff --git a/packages/build/src/index.js b/packages/build/src/index.js index <HASH>..<HASH> 100644 --- a/packages/build/src/index.js +++ b/packages/build/src/index.js @@ -47,6 +47,8 @@ module.exports = ({ return require.resolve("./marko-compiler"); })(); + const isMarko5 = !markoCompiler.createBuilder; + const legacyBrowsers = browserslist.loadConfig({ path: dir || file, @@ -73,13 +75,17 @@ module.exports = ({ "source-map-support": useAppModuleOrFallback(APP_DIR, "source-map-support") }); + const babelConfig = targets => ({ + presets: [[require.resolve("@babel/preset-env"), { targets }]], + plugins: [require.resolve("babel-plugin-macros")], + babelrc: false, + configFile: false + }); + const babelLoader = targets => ({ loader: require.resolve("babel-loader"), options: { - presets: [[require.resolve("@babel/preset-env"), { targets }]], - plugins: [require.resolve("babel-plugin-macros")], - babelrc: false, - configFile: false, + ...babelConfig(targets), cacheDirectory: true } }); @@ -92,15 +98,13 @@ module.exports = ({ }, { test: /\.marko$/, - use: [ - babelLoader(targets), - { - loader: require.resolve("@marko/webpack/loader"), - options: { - compiler: markoCompiler - } + use: (isMarko5 ? [] : [babelLoader(targets)]).concat({ + loader: require.resolve("@marko/webpack/loader"), + options: { + compiler: markoCompiler, + babelConfig: isMarko5 && babelConfig(targets) } - ] + }) }, { test: /\.css$/,
feat(build): marko 5 support
marko-js_cli
train
d8a192f181a426076d6252e45f28865c0de4e9e9
diff --git a/test/backup_test.go b/test/backup_test.go index <HASH>..<HASH> 100644 --- a/test/backup_test.go +++ b/test/backup_test.go @@ -819,3 +819,93 @@ func ensureRemoteBackup(ctx context.Context, b driver.ClientBackup, t *testing.T } return id } + +func TestBackupRestoreWithViews(t *testing.T) { + c := createClientFromEnv(t, true) + skipIfNoBackup(c, t) + ctx := context.Background() + b := c.Backup() + + isSingle := false + if role, err := c.ServerRole(ctx); err != nil { + t.Fatalf("Failed to obtain server role: %s", describe(err)) + } else { + isSingle = role == driver.ServerRoleSingle + } + + dbname := "backup" + colname := "col_views_docs" + viewname := "backup_view" + + trueVar := true + + db := ensureDatabase(ctx, c, dbname, nil, t) + col := ensureCollection(ctx, db, colname, nil, t) + ensureArangoSearchView(ctx, db, viewname, &driver.ArangoSearchViewProperties{ + Links: driver.ArangoSearchLinks{ + colname: driver.ArangoSearchElementProperties{ + IncludeAllFields: &trueVar, + }, + }, + }, t) + + const numThreads = 10 + const numDocs = 10000 + const totalNumDocs = numThreads * numDocs + + var wg sync.WaitGroup + for k := 0; k < numThreads; k++ { + wg.Add(1) + go func(i int) { + defer wg.Done() + + for j := 0; j < numDocs; j++ { + + book := BookWithAuthor{ + Title: fmt.Sprintf("Hello World - %d", j), + Author: fmt.Sprintf("Author - %d", i), + } + + _, err := col.CreateDocument(ctx, book) + if err != nil { + t.Fatalf("Failed to create document %s", describe(err)) + } + } + }(k) + } + wg.Wait() + + id, _, err := b.Create(ctx, nil) + if err != nil { + t.Fatalf("Failed to create backup: %s", describe(err)) + } + + // Now restore + if err := b.Restore(ctx, id, nil); err != nil { + t.Fatalf("Failed to restore backup: %s", describe(err)) + } + + if isSingle { + waitctx, cancel := context.WithTimeout(ctx, 30*time.Second) + defer cancel() + waitForServerRestart(waitctx, c, t) + } + + // run query to get document count of view + cursor, err := db.Query(ctx, 
fmt.Sprintf("FOR x IN %s COLLECT WITH COUNT INTO n RETURN n", viewname), nil) + if err != nil { + t.Fatalf("Failed to create query: %s", describe(err)) + } + + defer cursor.Close() + + var numDocumentsInView int + _, err = cursor.ReadDocument(ctx, &numDocumentsInView) + if err != nil { + t.Fatalf("Failed to get document count: %s", describe(err)) + } + + if numDocumentsInView != totalNumDocs { + t.Errorf("Wrong number of documents: found: %d, expected: %d", numDocumentsInView, totalNumDocs) + } +} diff --git a/test/types.go b/test/types.go index <HASH>..<HASH> 100644 --- a/test/types.go +++ b/test/types.go @@ -42,6 +42,11 @@ type Book struct { Title string } +type BookWithAuthor struct { + Title string + Author string +} + type RouteEdge struct { From string `json:"_from,omitempty"` To string `json:"_to,omitempty"`
Added test for views in backup. (#<I>)
arangodb_go-driver
train
b2da1cbcbf968e49f1fadf6c7d03b144c579e84d
diff --git a/lib/ostatus/atom.js b/lib/ostatus/atom.js index <HASH>..<HASH> 100644 --- a/lib/ostatus/atom.js +++ b/lib/ostatus/atom.js @@ -24,6 +24,7 @@ var Sys = require('sys'), Url = require('url'), + Util = require('util'), Http = require('./http.js'), Xml = require('o3-xml'); Path = require('path'), @@ -50,17 +51,28 @@ function parseFeed(url, callback) { try { var doc = Xml.parseFromString(body); var childNodes = doc.documentElement.childNodes; - var entries = []; + var feed = {}; + feed.items = []; + feed.links = []; + feed.url = url; + var items = []; for (var i=0; i<childNodes.length; i++) { var name = childNodes[i].nodeName; if (name == "entry") { var entry = _readEntry(childNodes[i]); - if (entry != null) { - entries.push(entry); - } + if (entry) feed.items.push(entry); + } else if (name == "link") { + var link = _readLink(childNodes[i]); + if (link) feed.links.push(link); + } else if (name == "title") { + feed.title = childNodes[i].nodeValue; + } else if (name == "subtitle") { + feed.subtitle = childNodes[i].nodeValue; + } else if (name == "updated") { + feed.updated = childNodes[i].nodeValue; } } - callback(null, entries); + callback(null, feed); } catch (exception) { callback(exception); } @@ -74,6 +86,11 @@ var _elements = { "updated": _parseUpdated }; +function _readLink(node) { + var link = _readAttributes(node, ["href", "rel", "type"]); + return link; +} + function _readEntry(node) { var childNodes = node.childNodes; var entry = {}; @@ -86,6 +103,15 @@ function _readEntry(node) { return entry; } +function _readAttributes(node, attributes) { + var result = {}; + for(i= 0; i<attributes.length; i++) { + var name = attributes[i]; + if (attribute = node.attributes.getNamedItem(name)) result[name] = attribute.value; + } + return result; +} + function _parseTitle(node, entry) { if (node != null) { entry["title"] = node.nodeValue;
Capture meta data such as title/subtitle/links etc. Entries are now in "items".
eschnou_node-ostatus
train
e9e30c0bd80b00264ece81e7a596c36084cd46d3
diff --git a/skyfield/sgp4lib.py b/skyfield/sgp4lib.py index <HASH>..<HASH> 100644 --- a/skyfield/sgp4lib.py +++ b/skyfield/sgp4lib.py @@ -192,7 +192,7 @@ class EarthSatellite(VectorFunction): R = _T(TEME.rotation_at(t)) r = mxv(R, r) v = mxv(R, v) - return r, v, r, error + return r, v, None, error def find_events(self, topos, t0, t1, altitude_degrees=0.0): """Return the times at which the satellite rises, culminates, and sets. diff --git a/skyfield/toposlib.py b/skyfield/toposlib.py index <HASH>..<HASH> 100644 --- a/skyfield/toposlib.py +++ b/skyfield/toposlib.py @@ -46,7 +46,7 @@ class ITRSPosition(VectorFunction): RT = _T(itrs.rotation_at(t)) r = mxv(RT, r) v = mxv(RT, v) - return r, v, r, None + return r, v, None, None class GeographicPosition(ITRSPosition): """The position of a latitude and longitude on Earth. diff --git a/skyfield/vectorlib.py b/skyfield/vectorlib.py index <HASH>..<HASH> 100644 --- a/skyfield/vectorlib.py +++ b/skyfield/vectorlib.py @@ -183,8 +183,8 @@ class ReversedVector(VectorFunction): return self.vector_function def _at(self, t): - p, v, gcrs_position, message = self.vector_function._at(t) - return -p, -v, gcrs_position, message + p, v, _, message = self.vector_function._at(t) + return -p, -v, None, message class VectorSum(VectorFunction): def __init__(self, center, target, vector_functions): @@ -211,12 +211,15 @@ class VectorSum(VectorFunction): def _at(self, t): p, v = 0.0, 0.0 gcrs_position = None - for vf in self.vector_functions: - p2, v2, another_gcrs_position, message = vf._at(t) - if gcrs_position is None: # TODO: so bootleg; rework whole idea - gcrs_position = another_gcrs_position + vfs = self.vector_functions + for vf in vfs: + p2, v2, _, message = vf._at(t) + if vf.center == 399: + gcrs_position = -p p += p2 v += v2 + if vfs[0].center == 0 and vf.center == 399: + gcrs_position = p2 return p, v, gcrs_position, message def _correct_for_light_travel_time(observer, target):
Eliminate 3rd return value from all but one _at() This small simplification took quite a few tries to get right, but starts moving us along again toward #<I>.
skyfielders_python-skyfield
train
7a7435fd81aa155080ad5a78ca62ab6ae7461cdd
diff --git a/regions/io/core.py b/regions/io/core.py index <HASH>..<HASH> 100644 --- a/regions/io/core.py +++ b/regions/io/core.py @@ -12,11 +12,11 @@ from astropy.coordinates import BaseCoordinateFrame, Angle from astropy import log from .. import shapes -from ..core import PixCoord, SkyRegion +from ..core import PixCoord, SkyRegion, RegionMeta, RegionVisual from .ds9.core import DS9RegionParserWarning, DS9RegionParserError from .crtf.core import CRTFRegionParserWarning, CRTFRegionParserError -__all__ = ['ShapeList', 'Shape', 'to_shape_list'] +__all__ = ['ShapeList', 'Shape', 'to_shape_list', 'to_crtf_meta', 'to_ds9_meta'] regions_attributes = dict(circle=['center', 'radius'], ellipse=['center', 'width', 'height', 'angle'], @@ -349,7 +349,7 @@ class Shape(object): coords = self.convert_coords() log.debug(coords) - viz_keywords = ['color', 'dashed', 'width', 'point', 'font', 'symsize', 'symsize', 'fontsize', 'fontstyle', + viz_keywords = ['color', 'dash', 'dashlist', 'width', 'font', 'symsize', 'symsize', 'fontsize', 'fontstyle', 'usetex', 'labelpos', 'labeloff', 'linewidth', 'linestyle'] if isinstance(coords[0], BaseCoordinateFrame): @@ -359,8 +359,8 @@ class Shape(object): else: self._raise_error("No central coordinate") - reg.visual = OrderedDict() - reg.meta = OrderedDict() + reg.visual = RegionVisual() + reg.meta = RegionMeta() for key in self.meta.keys(): if key in viz_keywords: reg.visual[key] = self.meta[key] @@ -430,10 +430,51 @@ def to_shape_list(region_list, format_type='DS9', coordinate_system='fk5'): new_coord.append(Angle(val.transform_to(frame).spherical.lon)) new_coord.append(Angle(val.transform_to(frame).spherical.lat)) - meta = copy.deepcopy(region.meta) - meta.update(region.visual) + if format_type == 'DS9': + meta = to_ds9_meta(region.meta, region.visual) + else: + meta = to_crtf_meta(region.meta, region.visual) shape_list.append(Shape(format_type, coordsys, reg_type, new_coord, meta, False, region.meta.get('include', False))) return shape_list 
+ + +def to_ds9_meta(region_meta, region_visual): + + valid_keys = ['label', 'symbol', 'include', 'tag', 'line', 'comment', 'name', 'select', 'highlite', 'fixed', + 'edit', 'move', 'rotate', 'delete', 'source', 'background'] # meta keys allowed in DS9 + valid_keys += ['color', 'dash', 'linewidth', 'font', 'dashlist', 'fill'] # visual keys allowed in DS9 + + key_mappings = {'symbol': 'point', 'label': 'text', 'linewidth': 'width'} # mapped to actual names in DS9 + + meta = dict() + for key in region_meta: + if key in valid_keys: + meta[key_mappings.get(key, key)] = region_meta[key] + for key in region_visual: + if key in valid_keys: + meta[key_mappings.get(key, key)] = region_visual[key] + + return meta + + +def to_crtf_meta(region_meta, region_visual): + + valid_keys = ['label', 'symbol', 'include', 'frame', 'range', 'veltype', 'restfreq', 'coord'] # meta keys allowed in CRTF + valid_keys += ['color', 'width', 'font', 'symthick', 'symsize', 'fontsize', 'fontstyle', + 'usetex', 'labelpos', 'labeloff', 'linewidth', 'linestyle'] # visual keys allowed in CRTF + + key_mappings = {} + + meta = dict() + + for key in region_meta: + if key in valid_keys: + meta[key_mappings.get(key, key)] = region_meta[key] + for key in region_visual: + if key in valid_keys: + meta[key_mappings.get(key, key)] = region_visual[key] + + return meta
added to_ds9_meta and to_crtf_meta to make meta attributes compatible with their respective format
astropy_regions
train
8b8d78c44ee255e801a275513d27dcfc13ccaeb2
diff --git a/src/test/java/com/maxmind/geoip2/DatabaseReaderTest.java b/src/test/java/com/maxmind/geoip2/DatabaseReaderTest.java index <HASH>..<HASH> 100644 --- a/src/test/java/com/maxmind/geoip2/DatabaseReaderTest.java +++ b/src/test/java/com/maxmind/geoip2/DatabaseReaderTest.java @@ -5,6 +5,8 @@ import com.maxmind.geoip2.exception.AddressNotFoundException; import com.maxmind.geoip2.exception.GeoIp2Exception; import com.maxmind.geoip2.model.*; import com.maxmind.geoip2.model.ConnectionTypeResponse.ConnectionType; +import com.maxmind.geoip2.record.City; +import com.maxmind.geoip2.record.Country; import org.junit.Before; import org.junit.Rule; import org.junit.Test; @@ -202,6 +204,10 @@ public class DatabaseReaderTest { assertFalse(response.isPublicProxy()); assertFalse(response.isTorExitNode()); assertEquals(ipAddress.getHostAddress(), response.getIpAddress()); + + AnonymousIpResponse tryResponse = reader.tryAnonymousIp(ipAddress).get(); + assertEquals(response.toJson(), tryResponse.toJson()); + reader.close(); } @@ -215,10 +221,32 @@ public class DatabaseReaderTest { assertEquals("Telstra Pty Ltd", response.getAutonomousSystemOrganization()); assertEquals(ipAddress.getHostAddress(), response.getIpAddress()); + + AsnResponse tryResponse = reader.tryAsn(ipAddress).get(); + assertEquals(response.toJson(), tryResponse.toJson()); + reader.close(); } @Test + public void testCity() throws Exception { + try (DatabaseReader reader = new DatabaseReader.Builder( + getFile("GeoIP2-City-Test.mmdb")).build(); + ) { + InetAddress ipAddress = InetAddress.getByName("81.2.69.192"); + + CityResponse response = reader.city(ipAddress); + assertEquals(2635167, response.getCountry().getGeoNameId().intValue()); + assertEquals(100, response.getLocation().getAccuracyRadius().intValue()); + assertFalse(response.getTraits().isLegitimateProxy()); + assertEquals(ipAddress.getHostAddress(), response.getTraits().getIpAddress()); + + CityResponse tryResponse = reader.tryCity(ipAddress).get(); + 
assertEquals(response.toJson(), tryResponse.toJson()); + } + } + + @Test public void testConnectionType() throws Exception { DatabaseReader reader = new DatabaseReader.Builder( this.getFile("GeoIP2-Connection-Type-Test.mmdb")).build(); @@ -228,10 +256,31 @@ public class DatabaseReaderTest { assertEquals(ConnectionType.CABLE_DSL, response.getConnectionType()); assertEquals(ipAddress.getHostAddress(), response.getIpAddress()); + + ConnectionTypeResponse tryResponse = reader.tryConnectionType(ipAddress).get(); + assertEquals(response.toJson(), tryResponse.toJson()); + reader.close(); } @Test + public void testCountry() throws Exception { + try (DatabaseReader reader = new DatabaseReader.Builder( + getFile("GeoIP2-Country-Test.mmdb")).build(); + ) { + InetAddress ipAddress = InetAddress.getByName("74.209.24.0"); + + CountryResponse response = reader.country(ipAddress); + assertEquals(99, response.getCountry().getConfidence().intValue()); + assertEquals(6252001, response.getCountry().getGeoNameId().intValue()); + assertEquals(ipAddress.getHostAddress(), response.getTraits().getIpAddress()); + + CountryResponse tryResponse = reader.tryCountry(ipAddress).get(); + assertEquals(response.toJson(), tryResponse.toJson()); + } + } + + @Test public void testDomain() throws Exception { DatabaseReader reader = new DatabaseReader.Builder( this.getFile("GeoIP2-Domain-Test.mmdb")).build(); @@ -239,6 +288,10 @@ public class DatabaseReaderTest { DomainResponse response = reader.domain(ipAddress); assertEquals("maxmind.com", response.getDomain()); assertEquals(ipAddress.getHostAddress(), response.getIpAddress()); + + DomainResponse tryResponse = reader.tryDomain(ipAddress).get(); + assertEquals(response.toJson(), tryResponse.toJson()); + reader.close(); } @@ -256,6 +309,10 @@ public class DatabaseReaderTest { assertEquals(ConnectionType.CABLE_DSL, response.getTraits().getConnectionType()); assertTrue(response.getTraits().isLegitimateProxy()); assertEquals(ipAddress.getHostAddress(), 
response.getTraits().getIpAddress()); + + EnterpriseResponse tryResponse = reader.tryEnterprise(ipAddress).get(); + assertEquals(response.toJson(), tryResponse.toJson()); + reader.close(); } @@ -272,6 +329,10 @@ public class DatabaseReaderTest { assertEquals("Telstra Internet", response.getOrganization()); assertEquals(ipAddress.getHostAddress(), response.getIpAddress()); + + IspResponse tryResponse = reader.tryIsp(ipAddress).get(); + assertEquals(response.toJson(), tryResponse.toJson()); + reader.close(); }
Add basic test for all the try methods
maxmind_GeoIP2-java
train
e79a428cf7070ba27ddf33baa5f1c464d3873dd1
diff --git a/lib/mascot/dat/peptides.rb b/lib/mascot/dat/peptides.rb index <HASH>..<HASH> 100644 --- a/lib/mascot/dat/peptides.rb +++ b/lib/mascot/dat/peptides.rb @@ -32,30 +32,30 @@ module Mascot class DAT::Peptides # A hash of the index positions for the peptide PSM matches. - # Keys are - attr_reader :psm - + # Keys arr + attr_reader :psmidx # To create a peptides enumerable, you need to pass in the dat file handle and # the byte offset of the peptides section. def initialize(dat_file, byteoffset, cache_psm_index=true) @byteoffset = byteoffset @file = File.new(dat_file,'r') @file.pos = @byteoffset - @psm = Array.new() + @curr_psm = [1,1] + @psmidx = Array.new() if cache_psm_index # create an in-memroy index of PSM byteoffsets q,p = 0 - boundary = Regexp.new(@file.readline) + @boundary = Regexp.new(@file.readline) @file.each do |line| break if line =~ boundary if (line =~ /q(\d+)_p(\d+)/) i,j = $1.to_i, $2.to_i next if q == i && p == j - unless @psm[i].kind_of? Array + unless @psmidx[i].kind_of? Array q = i - @psm[q] = [] + @psmidx[q] = [] end - @psm[i][j] = @file.pos - line.length + @psmidx[i][j] = @file.pos - line.length q,p = i,j end end @@ -66,10 +66,56 @@ module Mascot @file.pos = @byteoffset end - def parse_psm value + def read_psm q,p + @file.pos = @psmidx[q][p] + tmp = [] + file.each do |l| + break if l =~ @boundary + break unless l =~ /q#{q}_p#{p}/ + tmp << l.chomp + end + return tmp end - def parse_psm_terms value + def parse_psm psm_arr + psm_result = {} + psm_arr.each do |l| + k,v = l.split "=" + case k + when /^q\d+_p\d+$/ + #main result, must split value + psm_vals, prots = v.split(";") + psm_vals = psm_vals.split(',') + # proteins in last element + psm_result[:proteins] = prots.split(",").map do |pe| + acc,*other_vals = pe.split(":") + acc.gsub!(/\"/,'') + other_vals.map! 
{|e| e.to_i } + [acc] + other_vals + end + when /db$/ + # split on 2 chars, call to_i + psm_result[:dbs] = l.split(/(\d{2})/).grep(/\d/) { |e| e.to_i } + when /terms$/ + # for each protein, I have to add the term AA + psm_result[:terms] = v.split(":").collect {|t| t.split(",") } + else + # returns the smaller key + k_sym = k.slice(/q\d+_p\d+_?(.+)/,1).to_sym + psm_result[k_sym] = v + end + end + psm_result end + + # Method to read in and return a result + def result(query, rank) + parse_psm(read_psm(query,rank)) + end + def each + + end + + end end diff --git a/test/test_mascot-dat-peptides.rb b/test/test_mascot-dat-peptides.rb index <HASH>..<HASH> 100644 --- a/test/test_mascot-dat-peptides.rb +++ b/test/test_mascot-dat-peptides.rb @@ -33,6 +33,6 @@ class TestMascotDatPeptides < TestMascotDatHelper end def test_peptides_parse_first_psm - pass + # expected hash end end
initial pass at parsing query results in peptides section. in progress and unstable
delagoya_mascot-dat
train
0a9ba3e280a7830da318676a0644ee0226fd7c68
diff --git a/src/Suite/index.js b/src/Suite/index.js index <HASH>..<HASH> 100644 --- a/src/Suite/index.js +++ b/src/Suite/index.js @@ -176,10 +176,10 @@ class Suite { * * @param {Function} callback * - * @return {void} + * @return {Object} Instance of japa hook */ beforeEach (callback) { - this.group.beforeEach(callback) + return this.group.beforeEach(callback) } /** @@ -190,10 +190,10 @@ class Suite { * * @param {Function} callback * - * @return {void} + * @return {Object} Instance of japa hook */ afterEach (callback) { - this.group.afterEach(callback) + return this.group.afterEach(callback) } /** @@ -204,10 +204,10 @@ class Suite { * * @param {Function} callback * - * @return {void} + * @return {Object} Instance of japa hook */ after (callback) { - this.group.after(callback) + return this.group.after(callback) } /** @@ -218,10 +218,10 @@ class Suite { * * @param {Function} callback * - * @return {void} + * @return {Object} Instance of japa hook */ before (callback) { - this.group.before(callback) + return this.group.before(callback) } /** diff --git a/test/integration/runner.spec.js b/test/integration/runner.spec.js index <HASH>..<HASH> 100644 --- a/test/integration/runner.spec.js +++ b/test/integration/runner.spec.js @@ -57,11 +57,11 @@ jTest.group('Runner', (group) => { beforeEach(() => { called.push('beforeEach') - }) + }).timeout(2000) // Timeout function should be callable from here afterEach(() => { called.push('afterEach') - }) + }).timeout(2000) // Timeout function should be callable from here const runner = use('Test/Runner') test('2 + 2 is 4', function ({ assert }) {
feat(runner): return japa Hook instance (#<I>) to allow calling timeout function from spec files
adonisjs_adonis-vow
train
da37113307ee04f8fa3aac4740b87e5e5b577b69
diff --git a/src/Parser.php b/src/Parser.php index <HASH>..<HASH> 100644 --- a/src/Parser.php +++ b/src/Parser.php @@ -381,9 +381,6 @@ class Parser } } elseif ('\\' == $char) { $this->isValueEscaped = true; - } elseif ('%' == $char) { - $this->stateAfterCommentIsGone = $this->state; - $this->state = self::COMMENT; } else { $this->appendToBuffer($char); }
read properly the % char when inside a delimited value fix #6
renanbr_bibtex-parser
train
7664be99e23e5dadaa259e7f3418ff3651fd4f62
diff --git a/lib/active_hash/base.rb b/lib/active_hash/base.rb index <HASH>..<HASH> 100644 --- a/lib/active_hash/base.rb +++ b/lib/active_hash/base.rb @@ -308,6 +308,10 @@ module ActiveHash true end + def persisted? + self.class.all.map(&:id).include?(id) + end + def to_param id.to_s end diff --git a/spec/active_hash/base_spec.rb b/spec/active_hash/base_spec.rb index <HASH>..<HASH> 100644 --- a/spec/active_hash/base_spec.rb +++ b/spec/active_hash/base_spec.rb @@ -571,6 +571,16 @@ describe ActiveHash, "Base" do end end + describe "#persisted" do + it "should return true if the object has been saved" do + Country.create(:id => 2).should be_persisted + end + + it "should return false if the object has not been saved" do + Country.new(:id => 2).should_not be_persisted + end + end + describe "#eql?" do before do class Region < ActiveHash::Base
add persisted? method to ActiveHash::Base
zilkey_active_hash
train
3557f6fc7511cd481042f95d617fc4a0501feeb7
diff --git a/visidata/data.py b/visidata/data.py index <HASH>..<HASH> 100644 --- a/visidata/data.py +++ b/visidata/data.py @@ -23,7 +23,7 @@ globalCommand('r', 'tmp=cursorVisibleColIndex; moveRegex(sheet, regex=input("row globalCommand('zc', 'sheet.cursorVisibleColIndex = int(input("move to column number: "))', 'move to the given column number', 'view-go-column-number') globalCommand('zr', 'sheet.cursorRowIndex = int(input("move to row number: "))', 'move to the given row number', 'view-go-row-number') -globalCommand('R', 'nrows=int(input("random number to select: ")); select(random.sample(rows, nrows))', 'open duplicate sheet with a random population subset of # rows', 'rows-select-random') +globalCommand('R', 'nrows=int(input("random number to select: ")); vd.push(copy(sheet, "_sample")).rows=random.sample(rows, nrows)', 'open duplicate sheet with a random population subset of # rows', 'rows-select-random') globalCommand('a', 'rows.insert(cursorRowIndex+1, newRow()); cursorDown(1)', 'append a blank row', 'modify-add-row-blank') globalCommand('ga', 'for r in range(int(input("add rows: "))): addRow(newRow())', 'add N blank rows', 'modify-add-row-many')
[R] push sheet with random subset of rows (previously selected randomly)
saulpw_visidata
train
b61b223523997ec31fe79cffe8eb06fb3a563ae8
diff --git a/ninja-core/src/main/java/ninja/diagnostics/DiagnosticErrorRenderer.java b/ninja-core/src/main/java/ninja/diagnostics/DiagnosticErrorRenderer.java index <HASH>..<HASH> 100644 --- a/ninja-core/src/main/java/ninja/diagnostics/DiagnosticErrorRenderer.java +++ b/ninja-core/src/main/java/ninja/diagnostics/DiagnosticErrorRenderer.java @@ -22,16 +22,13 @@ import java.io.StringWriter; import java.io.Writer; import java.net.URI; import java.net.URL; -import java.util.Arrays; import java.util.List; import java.util.Map; import ninja.Context; import ninja.Cookie; -import ninja.FilterChain; import ninja.Result; import ninja.Route; import ninja.exceptions.InternalServerErrorException; -import ninja.utils.Message; import ninja.utils.ResponseStreams; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringEscapeUtils; @@ -139,7 +136,7 @@ public class DiagnosticErrorRenderer { String styleTemplate = getResource("diagnostic.css"); // simple token replacement - headerTemplate = headerTemplate.replace("${TITLE}", title); + headerTemplate = headerTemplate.replace("${TITLE}", escape(title)); headerTemplate = headerTemplate.replace("${STYLE}", styleTemplate); s.append(headerTemplate); @@ -176,7 +173,7 @@ public class DiagnosticErrorRenderer { s.append(" class=\"active\""); } - s.append("><a href=\"#tab").append(i).append("\">").append(names[i]).append("</a></li>\n"); + s.append("><a href=\"#tab").append(i).append("\">").append(escape(names[i])).append("</a></li>\n"); } s.append(" </ul>\n"); @@ -391,9 +388,9 @@ public class DiagnosticErrorRenderer { private void appendNameValue(StringBuilder sb, String name, String value) throws IOException { sb.append("<pre><span class=\"line\" style=\"width: 200px;\">"); - sb.append(name); + sb.append(escape(name)); sb.append("</span><span class=\"route\" style=\"left: 210px\">"); - sb.append(value); + sb.append(escape(value)); sb.append("</span></pre>"); } @@ -402,7 +399,7 @@ public class DiagnosticErrorRenderer { } 
private void appendNoValues(StringBuilder sb, String title) throws IOException { - sb.append("<pre style=\"border-bottom: 0px;\"><span style=\"position: absolute; left: 45px;\">").append(title).append("</span></pre><br/>"); + sb.append("<pre style=\"border-bottom: 0px;\"><span style=\"position: absolute; left: 45px;\">").append(escape(title)).append("</span></pre><br/>"); } private DiagnosticErrorRenderer appendSourceSnippet(URI sourceLocation, @@ -427,7 +424,7 @@ public class DiagnosticErrorRenderer { s.append("<span class=\"") .append("route") .append("\">") - .append(StringEscapeUtils.escapeHtml4(sourceLines.get(i))) + .append(escape(sourceLines.get(i))) .append("</span>"); s.append("</pre>"); } @@ -455,6 +452,10 @@ public class DiagnosticErrorRenderer { return sw.toString(); } + private String escape(String value) { + return StringEscapeUtils.escapeHtml4(value); + } + private String getResource(String resourceName) throws IOException { URL url = getClass().getResource(resourceName); if (url == null) {
Added additional HTML escaping for name/value pairs
ninjaframework_ninja
train
a9c266b4869f0735abcb98a1833263fa6a87f2a1
diff --git a/indra/pysb_assembler.py b/indra/pysb_assembler.py index <HASH>..<HASH> 100644 --- a/indra/pysb_assembler.py +++ b/indra/pysb_assembler.py @@ -6,9 +6,6 @@ from pysb import (Model, Monomer, Parameter, Rule, Annotation, from pysb.core import SelfExporter import pysb.export -from bel import bel_api -from biopax import biopax_api -from trips import trips_api from indra import statements as ist from indra.databases import context_client
Remove unnecessary imports from PySB assembler
sorgerlab_indra
train
6bb8ae9ca503a8c942c71d4907add298a92bc421
diff --git a/java/src/main/java/com/anaconda/skein/Model.java b/java/src/main/java/com/anaconda/skein/Model.java index <HASH>..<HASH> 100644 --- a/java/src/main/java/com/anaconda/skein/Model.java +++ b/java/src/main/java/com/anaconda/skein/Model.java @@ -18,10 +18,10 @@ public class Model { } } - private static void throwIfNonPositive(int i, String param) + private static void throwIfLessThan(int i, int min, String param) throws IllegalArgumentException { - if (i <= 0) { - throw new IllegalArgumentException(param + " must be > 0"); + if (i < min) { + throw new IllegalArgumentException(param + " must be > " + min + ", got " + i); } } @@ -85,10 +85,11 @@ public class Model { public Set<String> getDepends() { return depends; } public void validate() throws IllegalArgumentException { - throwIfNonPositive(instances, "instances"); + throwIfLessThan(instances, 0, "instances"); + throwIfLessThan(instances, -1, "maxRestarts"); throwIfNull(resources, "resources"); - throwIfNonPositive(resources.getMemory(), "resources.memory"); - throwIfNonPositive(resources.getVirtualCores(), "resources.vcores"); + throwIfLessThan(resources.getMemory(), 1, "resources.memory"); + throwIfLessThan(resources.getVirtualCores(), 1, "resources.vcores"); throwIfNull(localResources, "localResources"); throwIfNull(env, "env"); throwIfNull(commands, "commands"); @@ -144,7 +145,7 @@ public class Model { public void validate() throws IllegalArgumentException { throwIfNull(name, "name"); throwIfNull(queue, "queue"); - throwIfNonPositive(maxAttempts, "maxAttempts"); + throwIfLessThan(maxAttempts, 1, "maxAttempts"); throwIfNull(tags, "tags"); throwIfNull(services, "services"); if (services.size() == 0) {
Allow 0 for instances in spec
jcrist_skein
train
a1ac175fdb840f0738a377b08885037683dfe903
diff --git a/src/Requisite/Rule/NamespaceDirectoryMapper.php b/src/Requisite/Rule/NamespaceDirectoryMapper.php index <HASH>..<HASH> 100644 --- a/src/Requisite/Rule/NamespaceDirectoryMapper.php +++ b/src/Requisite/Rule/NamespaceDirectoryMapper.php @@ -53,6 +53,10 @@ class NamespaceDirectoryMapper implements AutoLoadRuleInterface { */ public function loadClass( $class ) { + //make sure the class name is absolute + if ( 0 !== strpos( $class, '\\' ) ) + $class = '\\' . $class; + // check if the namespace matches the class if ( 0 !== strpos( $class, $this->base_ns ) ) return FALSE;
make sure, class names starts always with a backslash
dnaber-de_Requisite
train
2800de660c4bb7cf4b6072569755c19a98398af2
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -51,7 +51,7 @@ docs_requirements = [ "distributed>=2.0", ] -gis_requirements = ["clisops>=0.4.0"] +gis_requirements = ["clisops>0.4.0"] dev_requirements = [] with open("requirements_dev.txt") as dev:
Set clisops to version above <I> (due to commit <I>da<I>d<I>ff8b<I>eccec4e3d<I>ec0cb8a)
Ouranosinc_xclim
train
4267c9cb2300cc0792b25b5a8ef020454e8521c1
diff --git a/openquake/commonlib/source.py b/openquake/commonlib/source.py index <HASH>..<HASH> 100644 --- a/openquake/commonlib/source.py +++ b/openquake/commonlib/source.py @@ -269,17 +269,17 @@ def accept_path(path, ref_path): >>> accept_path(['SM2'], ('SM2', 'a3b1')) False - >>> accept_path(['SM2', '*'], ('SM2', 'a3b1')) + >>> accept_path(['SM2', '@'], ('SM2', 'a3b1')) True - >>> accept_path(['*', 'a3b1'], ('SM2', 'a3b1')) + >>> accept_path(['@', 'a3b1'], ('SM2', 'a3b1')) True - >>> accept_path('**', ('SM2', 'a3b1')) + >>> accept_path('@@', ('SM2', 'a3b1')) True """ if len(path) != len(ref_path): return False for a, b in zip(path, ref_path): - if a != '*' and a != b: + if a != '@' and a != b: return False return True
Changed *->@ [skip hazardlib]
gem_oq-engine
train
25da38373af6f3b578001ea76c4a2f16451bd6d0
diff --git a/src/Monolog/Handler/SlackHandler.php b/src/Monolog/Handler/SlackHandler.php index <HASH>..<HASH> 100644 --- a/src/Monolog/Handler/SlackHandler.php +++ b/src/Monolog/Handler/SlackHandler.php @@ -144,6 +144,17 @@ class SlackHandler extends SocketHandler protected function write(array $record) { parent::write($record); + $this->finalizeWrite(); + } + + /** + * Finalizes the request by reading some bytes and then closing the socket + * + * If we do not read some but close the socket too early, slack sometimes + * drops the request entirely. + */ + protected function finalizeWrite() + { $res = $this->getResource(); if (is_resource($res)) { @fread($res, 2048);
Split off work into a new method to facilitate extension, fixes #<I>
Seldaek_monolog
train
734c5d73db0411f5ba3072658a8729992782ba0a
diff --git a/example/examples/CachedMap.js b/example/examples/CachedMap.js index <HASH>..<HASH> 100644 --- a/example/examples/CachedMap.js +++ b/example/examples/CachedMap.js @@ -66,8 +66,8 @@ class CachedMap extends React.Component { cacheEnabled={this.state.cache} zoomEnabled scrollingEnabled - loadingIndicatorColor={'#666666'} - loadingBackgroundColor={'#eeeeee'} + loadingIndicatorColor="#666666" + loadingBackgroundColor="#eeeeee" > <MapView.Marker coordinate={region} diff --git a/example/examples/LoadingMap.js b/example/examples/LoadingMap.js index <HASH>..<HASH> 100644 --- a/example/examples/LoadingMap.js +++ b/example/examples/LoadingMap.js @@ -41,8 +41,8 @@ class LoadingMap extends React.Component { initialRegion={this.state.region} onPress={this.onMapPress} loadingEnabled - loadingIndicatorColor={'#666666'} - loadingBackgroundColor={'#eeeeee'} + loadingIndicatorColor="#666666" + loadingBackgroundColor="#eeeeee" > <MapView.Marker coordinate={{
kept double quote and removed curly braces
react-native-community_react-native-maps
train
23d61b4452c6bee401c66ef8d416e74804cd8d6c
diff --git a/src/Utility.php b/src/Utility.php index <HASH>..<HASH> 100644 --- a/src/Utility.php +++ b/src/Utility.php @@ -33,7 +33,7 @@ class Utility * @param mixed $string * @param string $encoding */ - public function __construct($string = '', $encoding = null) + public function __construct($string = '', string $encoding = null) { if (is_array($string)) { throw new InvalidStringException('Arrays cannot be converted to string'); @@ -45,7 +45,7 @@ class Utility $this->string = (string)$string; } - $this->encoding = $encoding ?: \mb_internal_encoding(); + $this->setEncoding($encoding ?: \mb_internal_encoding()); } /** @@ -706,6 +706,20 @@ class Utility } /** + * Set the strings encoding + * + * @param string $encoding + * + * @return $this + */ + public function setEncoding(string $encoding): Utility + { + $this->encoding = trim($encoding); + + return $this; + } + + /** * Create the substring from index specified by $start up to, but not including the index specified by $end. * If $end value is omitted, the rest of the string is used. * If $end is negative, it is computed from the end of the string. diff --git a/tests/EncodingTest.php b/tests/EncodingTest.php index <HASH>..<HASH> 100644 --- a/tests/EncodingTest.php +++ b/tests/EncodingTest.php @@ -25,8 +25,20 @@ class EncodingTest extends BaseStringSuite * @dataProvider encodingProvider * @covers ::encoding */ - public function testStringsEncodingIsSet($encoding) + public function testStringsEncodingIsSetViaConstructor($encoding) { $this->assertEquals($encoding, $this->utility('hello world', $encoding)->encoding()); } + + /** + * Check that the strings encoding is set correctly + * + * @param string $encoding + * @dataProvider encodingProvider + * @covers ::setEncoding + */ + public function testStringsEncodingIsSetViaMethod($encoding) + { + $this->assertEquals($encoding, $this->utility('hello world')->setEncoding($encoding)->encoding()); + } }
added: method for setting the string encoding
myerscode_utilities-strings
train
f53eab9b8d4fbc76d4e7d057d835bc97ad23d999
diff --git a/src/cli-util.js b/src/cli-util.js index <HASH>..<HASH> 100644 --- a/src/cli-util.js +++ b/src/cli-util.js @@ -328,8 +328,21 @@ function formatFiles(argv) { function createUsage() { const options = constant.detailOptions; + const optionsWithOpposites = options.map(option => [ + option.description ? option : null, + option.oppositeDescription + ? Object.assign({}, option, { + name: `no-${option.name}`, + type: "boolean", + description: option.oppositeDescription + }) + : null + ]); + const flattenedOptions = [].concat + .apply([], optionsWithOpposites) + .filter(Boolean); - const groupedOptions = options.reduce((current, option) => { + const groupedOptions = flattenedOptions.reduce((current, option) => { const category = capitalize(option.category); const group = (current[category] = current[category] || []); group.push(option); @@ -350,24 +363,8 @@ function createUsage() { ); const optionsUsage = allCategories.map(category => { - const categoryOptions = [].concat - .apply( - [], - groupedOptions[category].map(option => [ - option.description && - createOptionUsage(option, OPTION_USAGE_THRESHOLD), - option.oppositeDescription && - createOptionUsage( - Object.assign({}, option, { - name: `no-${option.name}`, - type: "boolean", - description: option.oppositeDescription - }), - OPTION_USAGE_THRESHOLD - ) - ]) - ) - .filter(Boolean) + const categoryOptions = groupedOptions[category] + .map(option => createOptionUsage(option, OPTION_USAGE_THRESHOLD)) .join("\n"); return `${category} options:\n\n${indent(categoryOptions, 2)}`; });
Make cli-util.js a bit more readable
josephfrazier_prettier_d
train
fc02c2c1339702b2ed024a3664c5ac78ffde7356
diff --git a/src/Behat/Mink/Driver/ZombieDriver.php b/src/Behat/Mink/Driver/ZombieDriver.php index <HASH>..<HASH> 100644 --- a/src/Behat/Mink/Driver/ZombieDriver.php +++ b/src/Behat/Mink/Driver/ZombieDriver.php @@ -662,31 +662,15 @@ JS; * * @param string $event The name of the event * @param string $xpath The xpath of the element to trigger this event - * @param array $opts Additional event options (key-value) - * @param array $attrs Additional event attributes (key-value) */ - protected function triggerBrowserEvent($event, $xpath, array $opts = array(), array $attrs = array()) + protected function triggerBrowserEvent($event, $xpath) { if (!$ref = $this->getNativeRefForXPath($xpath)) { return; } - // Merge event attributes with event options - if (!empty($attrs)) { - $mergedAttrs = array_merge( - (isset($opt["attributes"]) ? $opt["attributes"] : array()), $attrs - ); - - if (!empty($mergedAttrs)) { - $opts["attributes"] = $mergedAttrs; - } - } - - // Encode options array - $opts = !empty($opts) ? json_encode($opts) : "{}"; - $js = <<<JS -browser.fire("{$event}", {$ref}, {$opts}, function(err) { +browser.fire("{$event}", {$ref}, function(err) { if (err) { stream.end(JSON.stringify(err.stack)); } else {
fixed ZombieDriver to support latest zombie.js
minkphp_Mink
train
a8e34b2266c4880351a6bba0e51ecc2f33b59908
diff --git a/param/parameterized.py b/param/parameterized.py index <HASH>..<HASH> 100644 --- a/param/parameterized.py +++ b/param/parameterized.py @@ -297,6 +297,17 @@ def instance_descriptor(f): return _f +def get_method_owner(method): + """ + Gets the instance that owns the supplied method + """ + if not inspect.ismethod(method): + return None + if isinstance(method, partial): + method = method.func + return method.__self__ if sys.version_info.major >= 3 else method.im_self + + @accept_arguments def depends(func, *dependencies, **kw): """ @@ -475,8 +486,10 @@ def _params_depended_on(minfo): return params -def _m_caller(self,n): - return lambda event: getattr(self,n)() +def _m_caller(self, n): + caller = lambda event: getattr(self,n)() + caller._watcher_name = n + return caller PInfo = namedtuple("PInfo","inst cls name pobj what") @@ -2432,6 +2445,25 @@ class Parameterized(object): """ self.initialized=False + # When making a copy the internal watchers have to be + # recreated and point to the new instance + if '_param_watchers' in state: + param_watchers = state['_param_watchers'] + for p, attrs in param_watchers.items(): + for attr, watchers in attrs.items(): + new_watchers = [] + for watcher in watchers: + watcher_args = list(watcher) + if watcher.inst is not None: + watcher_args[0] = self + fn = watcher.fn + if hasattr(fn, '_watcher_name'): + watcher_args[2] = _m_caller(self, fn._watcher_name) + elif get_method_owner(fn) is watcher.inst: + watcher_args[2] = getattr(self, fn.__name__) + new_watchers.append(Watcher(*watcher_args)) + param_watchers[p][attr] = new_watchers + if '_instance__params' not in state: state['_instance__params'] = {} if '_param_watchers' not in state: diff --git a/tests/API1/testwatch.py b/tests/API1/testwatch.py index <HASH>..<HASH> 100644 --- a/tests/API1/testwatch.py +++ b/tests/API1/testwatch.py @@ -1,14 +1,15 @@ """ Unit test for watch mechanism """ -from . 
import API1TestCase - -from .utils import MockLoggingHandler +import copy import param from param.parameterized import discard_events +from . import API1TestCase +from .utils import MockLoggingHandler + class Accumulator(object): @@ -37,6 +38,9 @@ class SimpleWatchExample(param.Parameterized): c = param.Parameter(default=0) d = param.Integer(default=0) + def method(self, event): + self.b = self.a * 2 + class SimpleWatchSubclass(SimpleWatchExample): pass @@ -466,6 +470,18 @@ class TestWatch(API1TestCase): self.assertEqual(args[1].new, 0) self.assertEqual(args[1].type, 'set') + def test_watch_deepcopy(self): + obj = SimpleWatchExample() + + obj.param.watch(obj.method, ['a']) + + copied = copy.deepcopy(obj) + + copied.a = 2 + + self.assertEqual(copied.b, 4) + self.assertEqual(obj.b, 0) + class TestWatchMethod(API1TestCase): @@ -523,6 +539,15 @@ class TestWatchMethod(API1TestCase): obj.b = 3 self.assertEqual(obj.c, 6) + def test_watcher_method_deepcopy(self): + obj = WatchMethodExample(b=5) + + copied = copy.deepcopy(obj) + + copied.b = 11 + self.assertEqual(copied.b, 10) + self.assertEqual(obj.b, 5) + class TestWatchValues(API1TestCase):
Ensure that Parameterized remaps watchers on copy (#<I>)
pyviz_param
train
265c2dc073c883eac9a1c0605afe936c2e897ffd
diff --git a/tests/test_main.py b/tests/test_main.py index <HASH>..<HASH> 100755 --- a/tests/test_main.py +++ b/tests/test_main.py @@ -2002,7 +2002,7 @@ def test_kel_thuzad(): game.end_turn() # ensure the effect is gone when Kel'Thuzad dies - game.player1.give(MOONFIRE).play(target=game.player1.field[1]) + game.player2.give(MOONFIRE).play(target=game.player1.field[1]) kt.destroy() assert len(game.player1.field) == 0 game.end_turn()
Fix Kel'Thuzad's test to play cards on the correct turn
jleclanche_fireplace
train
f207004570680afacd9e840dc03b8bbbf7dea1b6
diff --git a/ember_debug/adapters/firefox.js b/ember_debug/adapters/firefox.js index <HASH>..<HASH> 100644 --- a/ember_debug/adapters/firefox.js +++ b/ember_debug/adapters/firefox.js @@ -44,7 +44,7 @@ export default BasicAdapter.extend({ _listen() { - window.addEventListener('ember-debug-receive', function(event) { + window.addEventListener('ember-debug-receive', (event) => { var message = event.detail; run(() => { // FIX: needed to fix permission denied exception on Firefox >= 30 @@ -65,4 +65,3 @@ export default BasicAdapter.extend({ } }); -
Fix broken firefox version after ES6 refactor
emberjs_ember-inspector
train
64ce3be36b404d38fc4f8f1836c6d801916f1eaa
diff --git a/test/lib/smtpapi_header.test.js b/test/lib/smtpapi_header.test.js index <HASH>..<HASH> 100644 --- a/test/lib/smtpapi_header.test.js +++ b/test/lib/smtpapi_header.test.js @@ -133,5 +133,27 @@ describe('SmtpapiHeader', function() { assert(!_.isEmpty(JSON.parse(json).filters), 'should be empty'); }); + + it('should not remove any parameters on this object', function() { + header.addTo('kyle.partridge@sendgrid.com'); + header.addTo(['david.tomberlin@sendgrid.com']); + header.addUniqueArgs({foo: 'bar'}); + header.addFilterSetting('footer', 'enable', 1); + header.addFilterSetting('footer', 'text/html', '<b>boo</b>'); + + // call the json method to test if it removed anything + header.toJson(); + + header.to.should.eql(['kyle.partridge@sendgrid.com', 'david.tomberlin@sendgrid.com']); + header.unique_args.should.eql({foo: 'bar'}); + header.filters.should.eql({ + footer: { + settings: { + enable: 1, + 'text/html': '<b>boo</b>' + } + } + }); + }); }); });
added a test to make sure toJson is not destructive
sendgrid_sendgrid-nodejs
train
6bba2de44cdf4326f527601090405a4203b10e4a
diff --git a/tests/src/test/java/alluxio/master/block/BlockMasterIntegrityIntegrationTest.java b/tests/src/test/java/alluxio/master/block/BlockMasterIntegrityIntegrationTest.java index <HASH>..<HASH> 100644 --- a/tests/src/test/java/alluxio/master/block/BlockMasterIntegrityIntegrationTest.java +++ b/tests/src/test/java/alluxio/master/block/BlockMasterIntegrityIntegrationTest.java @@ -18,6 +18,7 @@ import alluxio.client.WriteType; import alluxio.client.file.FileSystem; import alluxio.client.file.FileSystemTestUtils; import alluxio.master.LocalAlluxioCluster; +import alluxio.master.file.BlockDeletionContext; import alluxio.master.file.FileSystemMaster; import alluxio.master.file.meta.InodeTree; import alluxio.master.file.meta.LockedInodePath; @@ -78,9 +79,12 @@ public class BlockMasterIntegrityIntegrationTest { InodeTree tree = Whitebox.getInternalState(fsm, "mInodeTree"); LockedInodePath path = tree.lockInodePath(uri, InodeTree.LockMode.WRITE); DeleteOptions options = DeleteOptions.defaults(); - JournalContext ctx = Whitebox.invokeMethod(fsm, "createJournalContext"); - Whitebox.invokeMethod(fsm, "deleteAndJournal", path, options, ctx); - ctx.close(); + BlockDeletionContext bctx = Whitebox.invokeMethod(fsm, "createBlockDeletionContext"); + JournalContext jctx = Whitebox.invokeMethod(fsm, "createJournalContext"); + Whitebox.invokeMethod(fsm, "deleteAndJournal", path, options, jctx, bctx); + path.close(); + jctx.close(); // Journal Context is closed before Block Context + bctx.close(); mCluster.stopWorkers(); mCluster.restartMasters(); mCluster.startWorkers(); // creates a new worker, so need to get the new BlockWorker @@ -107,9 +111,12 @@ public class BlockMasterIntegrityIntegrationTest { InodeTree tree = Whitebox.getInternalState(fsm, "mInodeTree"); LockedInodePath path = tree.lockInodePath(uri, InodeTree.LockMode.WRITE); DeleteOptions options = DeleteOptions.defaults(); - JournalContext ctx = Whitebox.invokeMethod(fsm, "createJournalContext"); - 
Whitebox.invokeMethod(fsm, "deleteAndJournal", path, options, ctx); - ctx.close(); + BlockDeletionContext bctx = Whitebox.invokeMethod(fsm, "createBlockDeletionContext"); + JournalContext jctx = Whitebox.invokeMethod(fsm, "createJournalContext"); + Whitebox.invokeMethod(fsm, "deleteAndJournal", path, options, jctx, bctx); + path.close(); + jctx.close(); // Journal Context is closed before Block Context + bctx.close(); CommonUtils.waitFor("invalid blocks to be deleted", (v) -> worker.getStoreMetaFull().getNumberOfBlocks() == 0, WaitForOptions.defaults().setTimeoutMs(2000));
Update Block Integrity Check Test. (#<I>)
Alluxio_alluxio
train
9035780ee980b8a795d33d4bd6375b10e486c58c
diff --git a/src/frontend/org/voltdb/RealVoltDB.java b/src/frontend/org/voltdb/RealVoltDB.java index <HASH>..<HASH> 100644 --- a/src/frontend/org/voltdb/RealVoltDB.java +++ b/src/frontend/org/voltdb/RealVoltDB.java @@ -49,12 +49,9 @@ import java.util.Map; import java.util.Random; import java.util.Set; import java.util.concurrent.CountDownLatch; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.ScheduledThreadPoolExecutor; import java.util.concurrent.Semaphore; -import java.util.concurrent.ThreadFactory; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; @@ -340,6 +337,9 @@ public class RealVoltDB implements VoltDBInterface, RestoreAgent.Callback } } + collectLocalNetworkMetadata(); + m_clusterMetadata.put(m_messenger.getHostId(), getLocalMetadata()); + /* * Create execution sites runners (and threads) for all exec sites except the first one. * This allows the sites to be set up in the thread that will end up running them. @@ -980,9 +980,6 @@ public class RealVoltDB implements VoltDBInterface, RestoreAgent.Callback hostLog.info(line.trim()); } - collectLocalNetworkMetadata(); - m_clusterMetadata.put(m_messenger.getHostId(), getLocalMetadata()); - /* * Publish our cluster metadata, and then retrieve the metadata * for the rest of the cluster
Populate the cluster metadata object before starting invocation buffer server.
VoltDB_voltdb
train
67aaada839759ddb5b3526badcbf3e6bd9f7c7a8
diff --git a/lib/jekyll/site.rb b/lib/jekyll/site.rb index <HASH>..<HASH> 100644 --- a/lib/jekyll/site.rb +++ b/lib/jekyll/site.rb @@ -63,7 +63,7 @@ module Jekyll self.tags = Hash.new { |hash, key| hash[key] = [] } if self.limit_posts < 0 - raise ArgumentError, "limit_posts must not be a negative number" + raise ArgumentError, "limit_posts must be a non-negative number" end end
Clean up some copy in the limit_posts error handling.
jekyll_jekyll
train
44ed297c6ca69b722d6e0cf3bb81011299dfe7cd
diff --git a/src/share/classes/com/sun/tools/javac/comp/MemberEnter.java b/src/share/classes/com/sun/tools/javac/comp/MemberEnter.java index <HASH>..<HASH> 100644 --- a/src/share/classes/com/sun/tools/javac/comp/MemberEnter.java +++ b/src/share/classes/com/sun/tools/javac/comp/MemberEnter.java @@ -982,6 +982,7 @@ public class MemberEnter extends JCTree.Visitor implements Completer { // Enter all member fields and methods of a set of half completed // classes in a second phase. + annotate.laterOnFlush(typeAnnotations.annotator(tree)); if (wasFirst) { try { while (halfcompleted.nonEmpty()) { @@ -991,7 +992,6 @@ public class MemberEnter extends JCTree.Visitor implements Completer { isFirst = true; } - annotate.laterOnFlush(typeAnnotations.annotator(tree)); annotate.flush(); } }
properly enrich some type annotations found later in completion queue
wmdietl_jsr308-langtools
train
40d787a40a4bb516d50ae89fbb81d656ce240237
diff --git a/examples/chap2/pinger.rb b/examples/chap2/pinger.rb index <HASH>..<HASH> 100644 --- a/examples/chap2/pinger.rb +++ b/examples/chap2/pinger.rb @@ -8,28 +8,27 @@ require 'rubygems' require 'bud' class Pinger < Bud - def initialize(ip, port) + def initialize(me, other, period) + @me = me + @other = other + @period = period + ip, port = me.split(':') super ip, port - @me = ARGV[0] - @other = ARGV[1] end def state channel :flow, ['@otherloc', 'me', 'msg', 'wall', 'budtick'] - periodic :timer, ARGV[2], ['id'], ['time'] + periodic :timer, @period, ['id'], ['time'] terminal :out, ['text'] end declare def logic - # whenever we get a timer, send out a tuple + # whenever we get a timer event, send out a tuple flow <~ timer.map {|t| [@other, @me, 'ping!', t.time, budtime]} out <= flow.map {|f| [f.inspect]} end end -source = ARGV[0].split(':') -program = Pinger.new(source[0], source[1]) +program = Pinger.new(ARGV[0], ARGV[1], ARGV[2]) program.run - - diff --git a/examples/chap2/ponger.rb b/examples/chap2/ponger.rb index <HASH>..<HASH> 100644 --- a/examples/chap2/ponger.rb +++ b/examples/chap2/ponger.rb @@ -7,10 +7,11 @@ require 'rubygems' require 'bud' class Ponger < Bud - def initialize(ip, port) + def initialize(me, other) + @me = me + @other = other + ip, port = me.split(':') super ip, port - @me = ARGV[0] - @other = ARGV[1] end def state @@ -21,11 +22,10 @@ class Ponger < Bud declare def logic # whenever we get a ping, send a pong + flow <~ flow.map {|p| [@other, @me, p.msg+": pong!", Time.new.to_s, budtime]} out <= flow.map {|f| [f.inspect]} - flow <~ flow.map {|p| [@other, @me, p.msg+": pong!", Time.new.to_s, budtime]} end end -source = ARGV[0].split(':') -program = Ponger.new(source[0], source[1]) +program = Ponger.new(ARGV[0], ARGV[1]) program.run
Code cleanup for pinger/ponger examples.
bloom-lang_bud
train
37961e34f8817b7d8be4b8a34baf0b3a826489a5
diff --git a/src/Node/Block/ListItem.php b/src/Node/Block/ListItem.php index <HASH>..<HASH> 100644 --- a/src/Node/Block/ListItem.php +++ b/src/Node/Block/ListItem.php @@ -20,7 +20,9 @@ class ListItem extends Container $node = $this->children[$i]; if ($node instanceof Paragraph) { - $this->children[$i] = new String($node->getText()); + // Ensure the line ends with a newline + $content = $node->getText()->rtrim("\n")->append("\n"); + $this->children[$i] = new String($content); } } }
Add newline to terse list content.
fluxbb_commonmark
train
1ac2128075f0ab574f93020450e559d5aec1b564
diff --git a/mod/quiz/attempt.php b/mod/quiz/attempt.php index <HASH>..<HASH> 100644 --- a/mod/quiz/attempt.php +++ b/mod/quiz/attempt.php @@ -55,6 +55,7 @@ // Print the page header if (!empty($quiz->popup)) { + define('MESSAGE_WINDOW', true); // This prevents the message window coming up print_header("$course->shortname: $quiz->name", '', '', '', '', false, '', '', false, ''); } else {
No message popups during secure quizzes
moodle_moodle
train
cd928687c4a633b55e7cb4aa2241b59b4a308ea4
diff --git a/efilter/version.py b/efilter/version.py index <HASH>..<HASH> 100644 --- a/efilter/version.py +++ b/efilter/version.py @@ -28,39 +28,46 @@ commitish, being output of a hash function, doesn't. __author__ = "Adam Sindelar <adamsh@google.com>" -import datetime -from dateutil import parser -import pytz -import re -import subprocess - - -def _unix_epoch(date): - """Convert datetime object to a UTC UNIX timestamp.""" - td = date - datetime.datetime(1970, 1, 1, tzinfo=pytz.UTC) - return int(td.total_seconds()) - -def run_git_log(): - """Generate version based on date of last commit. +import re - Returns: - UTC UNIX timestamp as int on success, or None. - """ - try: - p = subprocess.Popen( - ["git", "log", "-1", "--format=%cd", "--date=iso-strict"], - stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=False) - p.stderr.close() - output = p.stdout.readlines()[0] - date = parser.parse(output) - return _unix_epoch(date) - except (OSError, IndexError): - # Even if git log fails (because it's not in a git repo), the call may - # still 'succeed' as far as subprocess.Popen is concerned, hence the - # IndexError exception. I don't know why Python sometimes ignores - # the return code. - return None +try: + import datetime + import pytz + import subprocess + + # The below functionality is only available if dateutil is installed. + from dateutil import parser + + def _unix_epoch(date): + """Convert datetime object to a UTC UNIX timestamp.""" + td = date - datetime.datetime(1970, 1, 1, tzinfo=pytz.UTC) + return int(td.total_seconds()) + + def run_git_log(): + """Generate version based on date of last commit. + + Returns: + UTC UNIX timestamp as int on success, or None. 
+ """ + try: + p = subprocess.Popen( + ["git", "log", "-1", "--format=%cd", "--date=iso-strict"], + stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=False) + p.stderr.close() + output = p.stdout.readlines()[0] + date = parser.parse(output) + return _unix_epoch(date) + except (OSError, IndexError): + # Even if git log fails (because it's not in a git repo), the call + # may still 'succeed' as far as subprocess.Popen is concerned, + # hence the IndexError exception. I don't know why Python sometimes + # ignores the return code. + return None +except ImportError: + # If there's no dateutil then doing the git tango is pointless. + def run_git_log(): + pass def get_pkg_version():
Workaround for pip ignoring setup_requires.
google_dotty
train
a34ef8dcb764bdfd4eee3bd022c1f2ee8abde474
diff --git a/src/artist.py b/src/artist.py index <HASH>..<HASH> 100644 --- a/src/artist.py +++ b/src/artist.py @@ -5,10 +5,33 @@ class Artist(Entity): """ wraps the artist entity type as described at http://developer.musicmetric.com/timeseries.html """ - def __init__(self, artistUUID): + summary_attrs = ("class", "name", "id", "description", "musicbrainz", "previous_rank", "rank") + def __init__(self, artistUUID, **kwargs): + """ + creates an artist instance. UUID required(or equivelant 3rd party id with prefix, + see http://developer.musicmetric.com/identification.html for details) + any attributes that are in + """ self.entity_type = 'artist' self.entity_id = artistUUID + self.fetched_summary = False # prevent the summary from being fetched multiple times + for key, val in kwargs.items(): + if key in Artist.summary_attrs: + setattr(self, key, val) + else: + raise KeyError("unexpected creation attribute") + + def __getattr__(self, attr): + if attr in Artist.summary_attrs and not self.fetched_summary: + self.fetch_summary() + return getattr(self, attr) + return getattr(super(Artist, self), attr) + def fetch_summary(self): + """ + grabs the summary info and sets the corisponding attributes. + Note: overides existing attribute values for these attributes + """ self.response_from() #should really clean up the triple nest for k,v in self.response.items(): @@ -21,6 +44,7 @@ class Artist(Entity): else: for ssk,ssv in subv.items(): setattr(self,ssk,ssv) + self.fetched_summary = True
now handles explict preseting of summary attrs via kw
musicmetric_mmpy
train
92c8ff06ecdcc269591a0041e0740fe1856f3fe2
diff --git a/lib/opbeat/configuration.rb b/lib/opbeat/configuration.rb index <HASH>..<HASH> 100644 --- a/lib/opbeat/configuration.rb +++ b/lib/opbeat/configuration.rb @@ -69,7 +69,7 @@ module Opbeat def validate! %w{app_id secret_token organization_id}.each do |key| - raise Error.new("Configuration missing `#{key}'") unless self.send(key) + raise Error.new("Opbeat Configuration missing `#{key}'") unless self.send(key) end true
Error message: include word Opbeat
opbeat_opbeat-ruby
train
ba646c1aeafc7aa5ba9aca0319f8737599eae4cb
diff --git a/tools/setup/service_manager.js b/tools/setup/service_manager.js index <HASH>..<HASH> 100644 --- a/tools/setup/service_manager.js +++ b/tools/setup/service_manager.js @@ -201,7 +201,7 @@ ServiceManagerKlass.prototype = { fileManager.touch(chainFolder + '/' + sealerPassphraseFile, sealerPassword); return "geth --networkid " + networkId + " --datadir " + chainDataDir + " --port " + chainPort + - " --rpc --rpcapi eth,net,web3,personal --rpcport " + rpcPort + " --rpcaddr " + rpcHost + " --ws" + + " --rpc --rpcapi eth,net,web3,personal,txpool --rpcport " + rpcPort + " --rpcaddr " + rpcHost + " --ws" + " --wsport " + wsPort + " --wsorigins '*' --wsaddr " + wsHost + " --etherbase " + sealerAddr + " --mine --minerthreads 1 --targetgaslimit " + gasLimit[chain] + " --gasprice \"" + gasPrice + "\" --unlock " + sealerAddr + " --password "+ chainDataDir + "/" + sealerPassphraseFile + " 2> " +
added txpool to the list of RPC APIs
OpenSTFoundation_openst-platform
train
75a11ed22e4a2fba89054771a74f12458522d5c4
diff --git a/lib/Models/RegionMapping.js b/lib/Models/RegionMapping.js index <HASH>..<HASH> 100644 --- a/lib/Models/RegionMapping.js +++ b/lib/Models/RegionMapping.js @@ -565,14 +565,16 @@ function addDescriptionAndProperties(regionMapping, regionIndices, regionImagery if (defined(cRowObject)) { cRowObject.string._terria_columnAliases = columnAliases; cRowObject.string._terria_numericalProperties = cRowObject.number; - imageryLayerFeatureInfo.properties = cRowObject.string; + imageryLayerFeatureInfo.properties = Object.assign({}, feature.properties, cRowObject.string); } else { - imageryLayerFeatureInfo.properties = {}; + imageryLayerFeatureInfo.properties = Object.assign({}, feature.properties); } } else { // Time-varying. imageryLayerFeatureInfo.description = new CallbackProperty(getRegionRowDescriptionPropertyCallbackForId(uniqueId), false); - imageryLayerFeatureInfo.properties = new CallbackProperty(getRegionRowPropertiesPropertyCallbackForId(uniqueId), false); + // Merge vector tile and data properties + var propertiesCallback = getRegionRowPropertiesPropertyCallbackForId(uniqueId); + imageryLayerFeatureInfo.properties = new CallbackProperty(time => Object.assign({}, feature.properties, propertiesCallback(time)), false); } imageryLayerFeatureInfo.data = { id: uniqueId }; // For region highlight return imageryLayerFeatureInfo; @@ -719,7 +721,7 @@ function createNewRegionImageryLayer(regionMapping, layerIndex, regionIndices, g ImageryProviderHooks.addRecolorFunc(regionImageryProvider, colorFunction); layer = ImageryLayerCatalogItem.enableLayer(catalogItem, regionImageryProvider, catalogItem.opacity, layerIndex, globeOrMap); if (globeOrMap instanceof Leaflet && colorFunction) { - layer.options.crossOrigin = true; // Allow cross origin tiles + layer.options.crossOrigin = true; // Allow cross origin tiles layer.on('tileload', function(evt) { if (evt.tile._recolored) { // Already recoloured (this event is called when the recoloured tile "loads")
Add properties from vector tile features to feature info
TerriaJS_terriajs
train
0d7e85bd4cab16e5d8b3b91fcb6c792dbf590c2f
diff --git a/pyzotero/zotero.py b/pyzotero/zotero.py index <HASH>..<HASH> 100644 --- a/pyzotero/zotero.py +++ b/pyzotero/zotero.py @@ -674,30 +674,41 @@ class Zotero(object): data = req.text return self._json_processor(feedparser.parse(data)) - def register_upload(authdata): + def get_auth(attachment): """ - Step 3: upload successful, so register it + Step 1: get upload authorisation for a file """ - reg_headers = { + mtypes = mimetypes.guess_type(attachment) + digest = hashlib.md5() + with open(attachment, 'rb') as f: + for chunk in iter(lambda: f.read(8192), b''): + digest.update(chunk) + auth_headers = { 'Content-Type': 'application/x-www-form-urlencoded', 'If-None-Match': '*', 'User-Agent': 'Pyzotero/%s' % __version__ } - reg_data = { - 'upload': authdata.get('uploadKey') + data = { + 'md5': digest.hexdigest(), + 'filename': os.path.basename(attachment), + 'filesize': os.path.getsize(attachment), + 'mtime': str(int(os.path.getmtime(attachment) * 1000)), + 'contentType': mtypes[0] or 'application/octet-stream', + 'charset': mtypes[1] } - upload_reg = requests.post( + auth_req = requests.post( url=self.endpoint + '/users/{u}/items/{i}/file?key={k}'.format( u=self.library_id, i=created[idx]['key'], k=self.api_key), - data=reg_data, - headers=reg_headers) + data=data, + headers=auth_headers) try: - upload_reg.raise_for_status() + auth_req.raise_for_status() except requests.exceptions.HTTPError: - error_handler(upload_reg) + error_handler(auth_req) + return json.loads(auth_req.text) def uploadfile(authdata): """ @@ -724,41 +735,30 @@ class Zotero(object): error_handler(upload) return register_upload(authdata) - def get_auth(attachment): + def register_upload(authdata): """ - Step 1: get upload authorisation for a file + Step 3: upload successful, so register it """ - mtypes = mimetypes.guess_type(attachment) - digest = hashlib.md5() - with open(attachment, 'rb') as f: - for chunk in iter(lambda: f.read(8192), b''): - digest.update(chunk) - auth_headers = { + 
reg_headers = { 'Content-Type': 'application/x-www-form-urlencoded', 'If-None-Match': '*', 'User-Agent': 'Pyzotero/%s' % __version__ } - data = { - 'md5': digest.hexdigest(), - 'filename': os.path.basename(attachment), - 'filesize': os.path.getsize(attachment), - 'mtime': str(int(os.path.getmtime(attachment) * 1000)), - 'contentType': mtypes[0] or 'application/octet-stream', - 'charset': mtypes[1] + reg_data = { + 'upload': authdata.get('uploadKey') } - auth_req = requests.post( + upload_reg = requests.post( url=self.endpoint + '/users/{u}/items/{i}/file?key={k}'.format( u=self.library_id, i=created[idx]['key'], k=self.api_key), - data=data, - headers=auth_headers) + data=reg_data, + headers=reg_headers) try: - auth_req.raise_for_status() + upload_reg.raise_for_status() except requests.exceptions.HTTPError: - error_handler(auth_req) - return json.loads(auth_req.text) + error_handler(upload_reg) created = create_prelim(payload) for idx, content in enumerate(created):
Refactor attachment method to make it more readable
urschrei_pyzotero
train
1d6cf1356d7122b84dbe5210e38bbe5e9d2d867e
diff --git a/models/classes/search/SearchQueryFactory.php b/models/classes/search/SearchQueryFactory.php index <HASH>..<HASH> 100644 --- a/models/classes/search/SearchQueryFactory.php +++ b/models/classes/search/SearchQueryFactory.php @@ -42,7 +42,7 @@ class SearchQueryFactory extends ConfigurableService return new SearchQuery( $params['params']['query'], - $params['params']['rootNode'], + $params['params']['structure'], $params['params']['parentNode'], $startRow, $rows,
In Elasticsearch, the type should be taken from the structure variable of the request
oat-sa_tao-core
train
5eba62b47015092c091ee3ad3a170d3b430ba5cf
diff --git a/python/orca/src/bigdl/orca/ray/raycontext.py b/python/orca/src/bigdl/orca/ray/raycontext.py index <HASH>..<HASH> 100755 --- a/python/orca/src/bigdl/orca/ray/raycontext.py +++ b/python/orca/src/bigdl/orca/ray/raycontext.py @@ -123,6 +123,7 @@ class RayServiceFuncGenerator(object): def __init__(self, python_loc, redis_port, ray_node_cpu_cores, password, object_store_memory, verbose=False, env=None, + include_webui=False, extra_params=None): """object_store_memory: integer in bytes""" self.env = env @@ -133,6 +134,7 @@ class RayServiceFuncGenerator(object): self.ray_exec = self._get_ray_exec() self.object_store_memory = object_store_memory self.extra_params = extra_params + self.include_webui = include_webui self.verbose = verbose # _mxnet_worker and _mxnet_server are resource tags for distributed MXNet training only # in order to diff worker from server. @@ -161,10 +163,11 @@ class RayServiceFuncGenerator(object): return command def _gen_master_command(self): + webui = "true" if self.include_webui else "false" command = "{} start --head " \ - "--include-webui true --redis-port {} " \ + "--include-webui {} --redis-port {} " \ "--redis-password {} --num-cpus {}". \ - format(self.ray_exec, self.redis_port, self.password, + format(self.ray_exec, webui, self.redis_port, self.password, self.ray_node_cpu_cores) if self.labels: command = command + " " + self.labels @@ -260,7 +263,7 @@ class RayContext(object): _active_ray_context = None def __init__(self, sc, redis_port=None, password="123456", object_store_memory=None, - verbose=False, env=None, extra_params=None, + verbose=False, env=None, extra_params=None, include_webui=False, num_ray_nodes=None, ray_node_cpu_cores=None): """ The RayContext would initiate a ray cluster on top of the configuration of SparkContext. @@ -285,6 +288,7 @@ class RayContext(object): :param env: The environment variable dict for running ray processes. Default is None. 
:param extra_params: The key value dict for extra options to launch ray. For example, extra_params={"temp-dir": "/tmp/ray/"} + :param include_webui: whether to include ray web ui :param num_ray_nodes: The number of raylets to start across the cluster. For Spark local mode, you don't need to specify this value. For Spark cluster mode, it is default to be the number of Spark executors. If @@ -309,6 +313,7 @@ class RayContext(object): self.ray_processesMonitor = None self.env = env self.extra_params = extra_params + self.include_webui = include_webui self._address_info = None if self.is_local: self.num_ray_nodes = 1 @@ -373,6 +378,7 @@ class RayContext(object): object_store_memory=self.object_store_memory, verbose=self.verbose, env=self.env, + include_webui=self.include_webui, extra_params=self.extra_params) RayContext._active_ray_context = self @@ -457,6 +463,7 @@ class RayContext(object): self._address_info = ray.init(num_cpus=self.ray_node_cpu_cores, redis_password=self.redis_password, object_store_memory=self.object_store_memory, + include_webui=self.include_webui, resources=self.extra_params) else: self.cluster_ips = self._gather_cluster_ips()
attempt to fix ray memory (#<I>) * attempt to fix ray memory * exclude webui
intel-analytics_BigDL
train
6fc5e07ccd5027b090c17c59e0971ee32f559991
diff --git a/web/DraggingGestureHandler.js b/web/DraggingGestureHandler.js index <HASH>..<HASH> 100644 --- a/web/DraggingGestureHandler.js +++ b/web/DraggingGestureHandler.js @@ -1,19 +1,27 @@ import GestureHandler from './GestureHandler'; +import { PixelRatio } from 'react-native'; class DraggingGestureHandler extends GestureHandler { get shouldEnableGestureOnSetup() { return true; } - transformNativeEvent({ deltaX, deltaY, velocityX, velocityY, center: { x, y } }) { - const rect = this.view.getBoundingClientRect(); + transformNativeEvent({ + deltaX, + deltaY, + velocityX, + velocityY, + center: { x, y }, + }) { + const rect = this.view.getBoundingClientRect(); + const ratio = PixelRatio.get(); return { translationX: deltaX - (this.__initialX || 0), translationY: deltaY - (this.__initialY || 0), absoluteX: x, absoluteY: y, - velocityX, - velocityY, + velocityX: velocityX * ratio, + velocityY: velocityY * ratio, x: x - rect.left, y: y - rect.top, };
Multiply velocity to match velocity on native devices (#<I>) Velocity based gestures on web don't seem to match native gestures. This will make the interactions a little bit closer.
kmagiera_react-native-gesture-handler
train
adb9ecd47cc1afb5d6bc93adee408f3360bbbd72
diff --git a/core/src/main/java/io/undertow/server/HttpParser.java b/core/src/main/java/io/undertow/server/HttpParser.java index <HASH>..<HASH> 100644 --- a/core/src/main/java/io/undertow/server/HttpParser.java +++ b/core/src/main/java/io/undertow/server/HttpParser.java @@ -158,9 +158,16 @@ public abstract class HttpParser { public void handle(ByteBuffer buffer, final ParseState currentState, final HttpServerExchange builder) { if (currentState.state == ParseState.VERB) { handleHttpVerb(buffer, currentState, builder); - if (!buffer.hasRemaining()) { - return; + handlePath(buffer, currentState, builder); + handleHttpVersion(buffer, currentState, builder); + handleAfterVersion(buffer, currentState, builder); + while (currentState.state != ParseState.PARSE_COMPLETE && buffer.hasRemaining()) { + handleHeader(buffer, currentState, builder); + if (currentState.state == ParseState.HEADER_VALUE) { + handleHeaderValue(buffer, currentState, builder); + } } + return; } if (currentState.state == ParseState.PATH) { handlePath(buffer, currentState, builder); @@ -298,7 +305,7 @@ public abstract class HttpParser { queryParamPos = stringBuilder.length() + 1; } else if (next == '&' && parseState == QUERY_PARAM_NAME) { parseState = QUERY_PARAM_NAME; - if(state.mapCount++ > 1000) { + if (state.mapCount++ > 1000) { //todo: make configurable throw UndertowMessages.MESSAGES.tooManyQueryParameters(1000); } @@ -307,7 +314,7 @@ public abstract class HttpParser { queryParamPos = stringBuilder.length() + 1; } else if (next == '&' && parseState == QUERY_PARAM_VALUE) { parseState = QUERY_PARAM_NAME; - if(state.mapCount++ > 1000) { + if (state.mapCount++ > 1000) { //todo: make configurable throw UndertowMessages.MESSAGES.tooManyQueryParameters(1000); } @@ -351,7 +358,7 @@ public abstract class HttpParser { stringBuilder = new StringBuilder(); state.parseState = 0; - if(state.mapCount++ > 1000) { + if (state.mapCount++ > 1000) { //todo: make configurable throw 
UndertowMessages.MESSAGES.tooManyHeaders(1000); }
Parser changes to optimise the case where parsing succeeds on the first pass
undertow-io_undertow
train
bf0abf37273d5b4ba1120025d59a34376904ca74
diff --git a/lib/guard/dsl.rb b/lib/guard/dsl.rb index <HASH>..<HASH> 100644 --- a/lib/guard/dsl.rb +++ b/lib/guard/dsl.rb @@ -364,8 +364,6 @@ module Guard # @param [Hash] scopes the scope for the groups and plugins # def scope(scope = {}) - scope[:plugins] = Array(scope[:plugins] || scope[:plugin] || []) - scope[:groups] = Array(scope[:groups] || scope[:group] || []) ::Guard.setup_scope(scope) end end diff --git a/lib/guard/guardfile/evaluator.rb b/lib/guard/guardfile/evaluator.rb index <HASH>..<HASH> 100644 --- a/lib/guard/guardfile/evaluator.rb +++ b/lib/guard/guardfile/evaluator.rb @@ -229,6 +229,7 @@ module Guard ::Guard::UI.info(msg) ::Guard::Notifier.notify(msg, title: 'Guard re-evaluate') + ::Guard.setup_scope ::Guard.runner.run(:start) end end diff --git a/lib/guard/setuper.rb b/lib/guard/setuper.rb index <HASH>..<HASH> 100644 --- a/lib/guard/setuper.rb +++ b/lib/guard/setuper.rb @@ -92,10 +92,11 @@ module Guard # @see CLI#start # @see Dsl#scope # - def setup_scope(new_scope) + def setup_scope(scope = {}) + scope = _prepare_scope(scope) { groups: :add_group, plugins: :plugin }.each do |type, meth| - next unless new_scope[type] && new_scope[type].any? - scope[type] = new_scope[type].map do |item| + next unless scope[type].any? + @scope[type] = scope[type].map do |item| ::Guard.send(meth, item) end end @@ -311,8 +312,18 @@ module Guard def _load_guardfile _reset_all evaluate_guardfile - setup_scope(groups: options[:group], plugins: options[:plugin]) + setup_scope _setup_notifier end + + def _prepare_scope(scope) + plugins = Array(options[:plugin]) + plugins = Array(scope[:plugins] || scope[:plugin]) if plugins.empty? + + groups = Array(options[:group]) + groups = Array(scope[:groups] || scope[:group]) if groups.empty? 
+ + { plugins: plugins, groups: groups } + end end end diff --git a/spec/lib/guard/guardfile/evaluator_spec.rb b/spec/lib/guard/guardfile/evaluator_spec.rb index <HASH>..<HASH> 100644 --- a/spec/lib/guard/guardfile/evaluator_spec.rb +++ b/spec/lib/guard/guardfile/evaluator_spec.rb @@ -476,6 +476,12 @@ describe Guard::Guardfile::Evaluator do evaluator.reevaluate_guardfile end end + + it 'configures the scope' do + expect(::Guard).to receive(:setup_scope) + + evaluator.reevaluate_guardfile + end end end diff --git a/spec/lib/guard/setuper_spec.rb b/spec/lib/guard/setuper_spec.rb index <HASH>..<HASH> 100644 --- a/spec/lib/guard/setuper_spec.rb +++ b/spec/lib/guard/setuper_spec.rb @@ -212,6 +212,54 @@ describe Guard::Setuper do end end + describe '.setup_scope' do + subject { Guard.setup(options) } + + let(:guardfile) do + %w(group guard).map do |scope| + %w(foo bar baz).map do |name| + "#{ scope } :#{ name } do; end;" + end + end.flatten.join + end + + let(:listener) { instance_double(Listen::Listener) } + + before do + stub_const 'Guard::Foo', Class.new(Guard::Plugin) + stub_const 'Guard::Bar', Class.new(Guard::Plugin) + stub_const 'Guard::Baz', Class.new(Guard::Plugin) + allow(Listen).to receive(:to).with(Dir.pwd, {}) { listener } + allow(Guard::Notifier).to receive(:turn_on) + end + + [:group, :plugin].each do |scope| + context "with the global #{scope} option specified" do + let(:options) do + { :guardfile_contents => guardfile, scope => %w(foo bar) } + end + + it 'configures the scope according to the global option' do + subject.setup_scope(scope => :baz) + + expect(subject.scope[:"#{scope}s"].map(&:name).map(&:to_s)).to \ + contain_exactly('foo', 'bar') + end + end + + context "without the global #{scope} option specified" do + let(:options) { { guardfile_contents: guardfile } } + + it 'configures the scope according to the given option' do + subject.setup_scope(scope => :baz) + + expect(subject.scope[:"#{scope}s"].map(&:name).map(&:to_s)).to \ + 
contain_exactly('baz') + end + end + end + end + describe '.reset_plugins' do before do allow(Listen).to receive(:to).with(Dir.pwd, {})
Configure scope on re-evaluation and prioritize CLI scope options
guard_guard
train
3ce42b00351405cd03d5d0c69eec4d1e9a4b151a
diff --git a/openid/consumer/stores.py b/openid/consumer/stores.py index <HASH>..<HASH> 100644 --- a/openid/consumer/stores.py +++ b/openid/consumer/stores.py @@ -3,12 +3,13 @@ import time class ConsumerAssociation(object): """This class represents a consumer's view of an association.""" - @classmethod def fromExpiresIn(cls, expires_in, *args, **kwargs): kwargs['issued'] = int(time.time()) kwargs['lifetime'] = expires_in return cls(*args, **kwargs) + fromExpiresIn = classmethod(fromExpiresIn) + def __init__(self, server_url, handle, secret, issued, lifetime): self.server_url = server_url self.handle = handle
[project @ Remove decorator syntax for Python pre-<I> compatibility]
necaris_python3-openid
train
fd0a18911197885f844edc61de0f0ca30c8c9ac9
diff --git a/lib/cisco_node_utils/cmd_ref/feature.yaml b/lib/cisco_node_utils/cmd_ref/feature.yaml index <HASH>..<HASH> 100644 --- a/lib/cisco_node_utils/cmd_ref/feature.yaml +++ b/lib/cisco_node_utils/cmd_ref/feature.yaml @@ -1,6 +1,7 @@ # feature --- fabric: + _exclude: [/N3/] config_get: "show feature-set" config_get_token: '/^fabric[\s\d]+(\w+)/' config_set: "<state> feature-set fabric" @@ -9,7 +10,7 @@ fabric_forwarding: _exclude: [/N3/] config_get: "show running | i ^feature" config_get_token: '/^feature fabric forwarding$/' - config_set: "<state> feature fabric forwarding" + config_set: "feature fabric forwarding" nv_overlay: _exclude: [/N(5|6)/] diff --git a/lib/cisco_node_utils/feature.rb b/lib/cisco_node_utils/feature.rb index <HASH>..<HASH> 100644 --- a/lib/cisco_node_utils/feature.rb +++ b/lib/cisco_node_utils/feature.rb @@ -21,8 +21,9 @@ module Cisco class Feature < NodeUtil def self.fabric_enable # install feature-set and enable it + return if fabric_enabled? config_set('feature', 'fabric', state: 'install') unless fabric_installed? - config_set('feature', 'fabric', state: '') unless fabric_enabled? + config_set('feature', 'fabric', state: '') end def self.fabric_enabled? @@ -33,6 +34,16 @@ module Cisco config_get('feature', 'fabric') !~ /^uninstalled/ end + def self.fabric_forwarding_enable + return if fabric_forwarding_enabled? + Feature.fabric_enable unless node.product_id =~ /N9/ + config_set('feature', 'fabric_forwarding') + end + + def self.fabric_forwarding_enabled? + config_get('feature', 'fabric_forwarding') + end + def self.nv_overlay_enabled? 
config_get('feature', 'nv_overlay') rescue Cisco::CliError => e diff --git a/lib/cisco_node_utils/vxlan_global.rb b/lib/cisco_node_utils/vxlan_global.rb index <HASH>..<HASH> 100644 --- a/lib/cisco_node_utils/vxlan_global.rb +++ b/lib/cisco_node_utils/vxlan_global.rb @@ -26,31 +26,13 @@ module Cisco class VxlanGlobal < NodeUtil # Constructor for vxlan_global def initialize(instantiate=true) - enable if instantiate && !VxlanGlobal.enabled - end - - def enable - Feature.fabric_enable unless Feature.fabric_enabled? - config_set('feature', 'fabric_forwarding', state: '') + Feature.fabric_forwarding_enable if instantiate end def disable - config_set('feature', 'fabric_forwarding', state: 'no') if - Feature.fabric_enabled? dup_host_mac_detection_default end - # Check current state of the configuration - def self.enabled - feat = config_get('feature', 'fabric_forwarding') - return !(feat.nil? || feat.empty?) - rescue Cisco::CliError => e - # This cmd will syntax reject if feature is not - # enabled. Just catch the reject and return false. - return false if e.clierror =~ /Syntax error/ - raise - end - # ---------- # PROPERTIES # ---------- diff --git a/tests/test_vxlan_global.rb b/tests/test_vxlan_global.rb index <HASH>..<HASH> 100755 --- a/tests/test_vxlan_global.rb +++ b/tests/test_vxlan_global.rb @@ -13,6 +13,7 @@ # limitations under the License. require_relative 'ciscotest' +require_relative '../lib/cisco_node_utils/feature' require_relative '../lib/cisco_node_utils/vxlan_global' include Cisco @@ -33,13 +34,9 @@ class TestVxlanGlobal < CiscoTestCase config('no feature fabric forwarding') end - def test_on_off + def test_feature_on feat = VxlanGlobal.new - feat.enable - assert(VxlanGlobal.enabled) - - feat.disable - refute(VxlanGlobal.enabled) + assert(Feature.fabric_forwarding_enabled?) end def test_dup_host_ip_addr_detection_set
Refactor feature provider and fix for 9k
cisco_cisco-network-node-utils
train
e114e060285556e9927c78bbc96229b5beac4054
diff --git a/empire/pkg/awsutil/awsutil.go b/empire/pkg/awsutil/awsutil.go index <HASH>..<HASH> 100644 --- a/empire/pkg/awsutil/awsutil.go +++ b/empire/pkg/awsutil/awsutil.go @@ -4,6 +4,7 @@ import ( "encoding/json" "fmt" "io" + "io/ioutil" "net/http" "strings" ) @@ -17,7 +18,7 @@ type Request struct { func (r *Request) String() string { body, err := formatJSON(strings.NewReader(r.Body)) if err != nil { - panic(err) + body = r.Body } return fmt.Sprintf("Operation: %s\nBody: %s", r.Operation, body) } @@ -47,11 +48,16 @@ func NewHandler(m map[Request]Response) *Handler { } func (h *Handler) ServeHTTP(w http.ResponseWriter, r *http.Request) { - body, err := formatJSON(r.Body) + raw, err := ioutil.ReadAll(r.Body) if err != nil { panic(err) } + body, err := formatJSON(r.Body) + if err != nil { + body = string(raw) + } + match := Request{ Operation: r.Header.Get("X-Amz-Target"), Body: body,
Allow awsutil.Handler to fall back to raw IO when unable to JSON-parse
remind101_empire
train
8d8bf13986b3d32eff65ad1ac9c81f18fad95451
diff --git a/docs/pages/components/tag/api/tag.js b/docs/pages/components/tag/api/tag.js index <HASH>..<HASH> 100644 --- a/docs/pages/components/tag/api/tag.js +++ b/docs/pages/components/tag/api/tag.js @@ -24,7 +24,7 @@ export default [ }, { name: '<code>size</code>', - description: 'Size of the tab, optional', + description: 'Size of the tag, optional', type: 'String', values: '<code>is-medium</code>, <code>is-large</code>', default: '—' @@ -92,7 +92,7 @@ export default [ props: [ { name: '<code>attached</code>', - description: 'Tabs inside are attached together', + description: 'Tags inside are attached together', type: 'Boolean', values: '—', default: '<code>false</code>'
[docs/tag/api] Fix typo (#<I>)
buefy_buefy
train
599b2dd2a912842093a813ae07f04ad3000aeb5b
diff --git a/commands/WirelessCommand/index.js b/commands/WirelessCommand/index.js index <HASH>..<HASH> 100644 --- a/commands/WirelessCommand/index.js +++ b/commands/WirelessCommand/index.js @@ -154,9 +154,9 @@ WirelessCommand.prototype.__networks = function networks(err, dat) { prompt([{ type: 'confirm', - name: 'setupAll', - message: 'Multiple Photons detected nearby. Would you like to perform setup on all of them now?', - default: false, + name: 'setup', + message: 'Multiple Photons detected nearby. Would you like to select one to setup now?', + default: true, }], multipleChoice); } @@ -198,15 +198,15 @@ WirelessCommand.prototype.__networks = function networks(err, dat) { if(ans.setupAll) { - self.__batch = detectedDevices; - self.setup(null); + self.__manual = true; + return self.setup(null, manualDone); } else { // Select any/all Photons to setup prompt([{ - type: 'checkbox', + type: 'list', name: 'selected', message: 'Please select which Photons you would like to setup at this time.', choices: detectedDevices
JK about the whole 'setup all the photons at once' thing...
particle-iot_particle-cli
train
f8663c90f3474a97175cd4ffcb35b190438df39b
diff --git a/tests/check_autoloadlibrary.py b/tests/check_autoloadlibrary.py index <HASH>..<HASH> 100755 --- a/tests/check_autoloadlibrary.py +++ b/tests/check_autoloadlibrary.py @@ -34,7 +34,7 @@ class AutoLoadLibrary(u.TestCase): broken_modules.append('%s: %s' % \ (moduleName, os.path.join(subdir, fileName))) - self.assertEqual( len(broken_modules), 0, + self.assertEqual( len(broken_modules), 0, \ "\nThe following modules do not contain the standard auto load library code:\n" + \ "\n".join(broken_modules) + \ "\nConsider adding the following snippet to the SWIG interface file:\n" + \
ctest: Fix for autoloadlibrary ctest Update to fix a python indentation error.
intel-iot-devkit_upm
train
f9c57d2e87e7ea58270df03dc34264d182e448b1
diff --git a/src/GeoDataCollection.js b/src/GeoDataCollection.js index <HASH>..<HASH> 100644 --- a/src/GeoDataCollection.js +++ b/src/GeoDataCollection.js @@ -1577,8 +1577,26 @@ GeoDataCollection.prototype.addGeoJsonLayer = function(geojson, layer) { layer.style.polygon.fillcolor = layer.style.line.color; layer.style.polygon.fillcolor.alpha = 0.75; } - - var newDataSource = new GeoJsonDataSource(); + + // If this GeoJSON is an object literal with a single property, treat that + // property as the name of the data source, and the property's value as the + // actual GeoJSON. + var numProperties = 0; + var propertyName; + for (propertyName in geojson) { + if (geojson.hasOwnProperty(propertyName)) { + ++numProperties; + if (numProperties > 1) { + break; // no need to count past 2 properties. + } + } + } + + var name; + if (numProperties === 1) { + name = propertyName; + geojson = geojson[propertyName]; + } //Reprojection and downsampling var crs_code = getCrsCode(geojson); @@ -1601,7 +1619,9 @@ GeoDataCollection.prototype.addGeoJsonLayer = function(geojson, layer) { } if (this.map === undefined) { - //create the object + //create the object + var newDataSource = new GeoJsonDataSource(name); + newDataSource.load(geojson).then(function() { var entities = newDataSource.entities.entities;
Handle SA GeoJSON with mysterious extra envelope around it.
TerriaJS_terriajs
train
d7c182ef3bf906ba492cddf16e43ccd0fc83f71d
diff --git a/fluent_contents/templatetags/fluent_contents_tags.py b/fluent_contents/templatetags/fluent_contents_tags.py index <HASH>..<HASH> 100644 --- a/fluent_contents/templatetags/fluent_contents_tags.py +++ b/fluent_contents/templatetags/fluent_contents_tags.py @@ -305,8 +305,17 @@ def _get_placeholder_arg(arg_name, placeholder): elif isinstance(placeholder, Placeholder): return placeholder elif isinstance(placeholder, Manager): + manager = placeholder try: - return placeholder.all()[0] + parent_object = manager.instance # read RelatedManager code + except AttributeError: + parent_object = None + + try: + placeholder = manager.all()[0] + if parent_object is not None: + placeholder.parent = parent_object # Fill GFK cache + return placeholder except IndexError: raise RuntimeWarning(u"No placeholders found for query '{0}.all.0'".format(arg_name)) else: diff --git a/fluent_contents/tests/templatetags.py b/fluent_contents/tests/templatetags.py index <HASH>..<HASH> 100644 --- a/fluent_contents/tests/templatetags.py +++ b/fluent_contents/tests/templatetags.py @@ -126,10 +126,10 @@ class TemplateTagTests(AppTestCase): # Test passing a related object manager. # - fetch Placeholder - # - fetch parent (in RenderPlaceholderNode.render_tag) + # - parent is taken from RelatedManager # - fetch ContentItem # - fetch RawHtmlTestItem - with self.assertNumQueries(4) as ctx: + with self.assertNumQueries(3) as ctx: html = self._render("""{% load fluent_contents_tags %}{% render_placeholder page2.placeholder_set %}""", {'page2': page2}) self.assertEqual(html, u'<b>Item1!</b><b>Item2!</b>')
Optimize render_placeholder with manager, avoid parent query
django-fluent_django-fluent-contents
train
adda7130a28de90be54de13e200a64a35876d6b5
diff --git a/dist/pptxgen.js b/dist/pptxgen.js index <HASH>..<HASH> 100644 --- a/dist/pptxgen.js +++ b/dist/pptxgen.js @@ -3297,7 +3297,13 @@ var PptxGenJS = function(){ var strXml = ''; // Build cat axis tag - strXml += '<c:'+ (opts.catLabelFormatCode ? 'dateAx' : 'catAx') +'>'; + // NOTE: Scatter and Bubble chart need two Val axises as they display numbers on x axis + if(opts.type.name == 'scatter' || opts.type.name == 'bubble'){ + strXml += '<c:valAx>'; + } + else{ + strXml += '<c:' + (opts.catLabelFormatCode ? 'dateAx' : 'catAx') + '>'; + } strXml += ' <c:axId val="'+ axisId +'"/>'; strXml += ' <c:scaling><c:orientation val="'+ (opts.catAxisOrientation || (opts.barDir == 'col' ? 'minMax' : 'minMax')) +'"/></c:scaling>'; strXml += ' <c:delete val="'+ (opts.catAxisHidden ? 1 : 0) +'"/>'; @@ -3313,7 +3319,13 @@ var PptxGenJS = function(){ title: opts.catAxisTitle || 'Axis Title' }); } - strXml += ' <c:numFmt formatCode="'+ (opts.catLabelFormatCode || "General") +'" sourceLinked="0"/>'; + //NOTE: Adding Val Axis Formatting if scatter or bubble charts + if(opts.type.name == 'scatter' || opts.type.name == 'bubble'){ + strXml += ' <c:numFmt formatCode="'+ (opts.valAxisLabelFormatCode ? opts.valAxisLabelFormatCode : 'General') +'" sourceLinked="0"/>'; + } + else{ + strXml += ' <c:numFmt formatCode="'+ (opts.catLabelFormatCode || "General") +'" sourceLinked="0"/>'; + } if ( opts.type.name === 'scatter' ) { strXml += ' <c:majorTickMark val="none"/>'; strXml += ' <c:minorTickMark val="none"/>'; @@ -3368,7 +3380,13 @@ var PptxGenJS = function(){ } // Close cat axis tag - strXml += '</c:'+ (opts.catLabelFormatCode ? 'dateAx' : 'catAx') +'>'; + //NOTE: Added closing tag of val or cat axis based on chart type + if(opts.type.name == 'scatter' || opts.type.name == 'bubble'){ + strXml += '</c:valAx>'; + } + else{ + strXml += '</c:' + (opts.catLabelFormatCode ? 'dateAx' : 'catAx') + '>'; + } return strXml; }
Irregular Display of Cat Axis for Scatter and Bubble In the case of Scatter and Bubble charts, the category axis also contains numbers, just like the value axis. As a result, when values on the cat axis have more than 2 digits, the display of the category labels is compromised. These charts should therefore contain two Val axes.
gitbrent_PptxGenJS
train
6bd66459f61ec5b33427027192bc6596aee5adfd
diff --git a/specter/expect.py b/specter/expect.py index <HASH>..<HASH> 100644 --- a/specter/expect.py +++ b/specter/expect.py @@ -63,7 +63,7 @@ class ExpectAssert(object): def be_none(self): self._compare(action_name=_('be'), expected=None, - condition=self.target == None) + condition=self.target is None) def be_true(self): self._compare(action_name=_('be'), expected=True,
Switching to "is" instead of "=="
jmvrbanac_Specter
train
0558b9c44b35b22c21d37f68c01525b6cc6df990
diff --git a/api/operator.go b/api/operator.go index <HASH>..<HASH> 100644 --- a/api/operator.go +++ b/api/operator.go @@ -58,7 +58,7 @@ func (op *Operator) RaftRemovePeerByAddress(address raft.ServerAddress, q *Write r.setWriteOptions(q) // TODO (slackpad) Currently we made address a query parameter. Once - // IDs are in place this will be DELETE /v1/raft-peer/<id>. + // IDs are in place this will be DELETE /v1/operator/raft/peer/<id>. r.params.Set("address", string(address)) _, resp, err := requireOK(op.c.doRequest(r))
Fixes a stale comment.
hashicorp_consul
train
a467f878dd4e88f4487b3b7392d1244846407e96
diff --git a/common-core-open/src/main/java/com/bbn/bue/common/files/FileUtils.java b/common-core-open/src/main/java/com/bbn/bue/common/files/FileUtils.java index <HASH>..<HASH> 100755 --- a/common-core-open/src/main/java/com/bbn/bue/common/files/FileUtils.java +++ b/common-core-open/src/main/java/com/bbn/bue/common/files/FileUtils.java @@ -541,6 +541,21 @@ public final class FileUtils { return ret.build(); } + /** + * Loads a file in the format {@code key value1} (tab-separated) into a {@link + * com.google.common.collect.ImmutableMap} of {@link String}s. Each key should only appear on one + * line, and there should be no duplicate values. Each key and value has whitespace trimmed off. + * Skips empty lines and allows comment-lines with {@code #} in the first position. + */ + public static ImmutableMap<Symbol, Symbol> loadSymbolMap(CharSource source) throws IOException { + final ImmutableMap.Builder<Symbol, Symbol> ret = ImmutableMap.builder(); + + for (ImmutableMap.Entry<String, String> row : loadStringMap(source).entrySet()) { + ret.put(Symbol.from(row.getKey()), Symbol.from(row.getValue())); + } + + return ret.build(); + } public static void writeSymbolMultimap(Multimap<Symbol, Symbol> mm, CharSink charSink) throws IOException {
add a convenience method that loads a two-column file as a map of Symbols to Symbols
BBN-E_bue-common-open
train
1632330df524f5be92ef80c6cac2dee113f9435d
diff --git a/src/Console/ChromeDriverCommand.php b/src/Console/ChromeDriverCommand.php index <HASH>..<HASH> 100644 --- a/src/Console/ChromeDriverCommand.php +++ b/src/Console/ChromeDriverCommand.php @@ -167,7 +167,7 @@ class ChromeDriverCommand extends Command { $home = $this->getUrl($this->homeUrl); - preg_match('/Latest stable release:.*?\?path=([\d.]+)/', $home, $matches); + preg_match('/release:.*?\?path=([\d.]+)/', $home, $matches); return $matches[1]; }
Update regular expression base on website changes Some markup is updated on <URL>
laravel_dusk
train
210e6b2790a841b4b8a5e8de6649bea736d05200
diff --git a/simulator/src/main/java/com/hazelcast/simulator/common/SimulatorProperties.java b/simulator/src/main/java/com/hazelcast/simulator/common/SimulatorProperties.java index <HASH>..<HASH> 100644 --- a/simulator/src/main/java/com/hazelcast/simulator/common/SimulatorProperties.java +++ b/simulator/src/main/java/com/hazelcast/simulator/common/SimulatorProperties.java @@ -128,6 +128,10 @@ public class SimulatorProperties { return parseInt(get("HAZELCAST_PORT_RANGE_SIZE", "50")); } + public String getCloudProvider() { + return get("CLOUD_PROVIDER"); + } + public String get(String name) { String value = (String) properties.get(name); diff --git a/simulator/src/main/java/com/hazelcast/simulator/provisioner/ProvisionerCli.java b/simulator/src/main/java/com/hazelcast/simulator/provisioner/ProvisionerCli.java index <HASH>..<HASH> 100644 --- a/simulator/src/main/java/com/hazelcast/simulator/provisioner/ProvisionerCli.java +++ b/simulator/src/main/java/com/hazelcast/simulator/provisioner/ProvisionerCli.java @@ -26,6 +26,7 @@ import org.jclouds.compute.ComputeService; import static com.hazelcast.simulator.common.SimulatorProperties.PROPERTIES_FILE_NAME; import static com.hazelcast.simulator.utils.CliUtils.initOptionsWithHelp; import static com.hazelcast.simulator.utils.CliUtils.printHelpAndExit; +import static com.hazelcast.simulator.utils.CloudProviderUtils.isStatic; import static com.hazelcast.simulator.utils.SimulatorUtils.loadSimulatorProperties; final class ProvisionerCli { @@ -70,7 +71,7 @@ final class ProvisionerCli { OptionSet options = initOptionsWithHelp(cli.parser, args); SimulatorProperties properties = loadSimulatorProperties(options, cli.propertiesFileSpec); - ComputeService computeService = new ComputeServiceBuilder(properties).build(); + ComputeService computeService = isStatic(properties) ? 
null : new ComputeServiceBuilder(properties).build(); Bash bash = new Bash(properties); return new Provisioner(properties, computeService, bash); } diff --git a/simulator/src/main/java/com/hazelcast/simulator/utils/CloudProviderUtils.java b/simulator/src/main/java/com/hazelcast/simulator/utils/CloudProviderUtils.java index <HASH>..<HASH> 100644 --- a/simulator/src/main/java/com/hazelcast/simulator/utils/CloudProviderUtils.java +++ b/simulator/src/main/java/com/hazelcast/simulator/utils/CloudProviderUtils.java @@ -15,6 +15,8 @@ */ package com.hazelcast.simulator.utils; +import com.hazelcast.simulator.common.SimulatorProperties; + public final class CloudProviderUtils { public static final String PROVIDER_STATIC = "static"; @@ -24,6 +26,10 @@ public final class CloudProviderUtils { private CloudProviderUtils() { } + public static boolean isStatic(SimulatorProperties properties) { + return PROVIDER_STATIC.equals(properties.getCloudProvider()); + } + public static boolean isStatic(String cloudProvider) { return PROVIDER_STATIC.equals(cloudProvider); } diff --git a/simulator/src/test/java/com/hazelcast/simulator/utils/CloudProviderUtilsTest.java b/simulator/src/test/java/com/hazelcast/simulator/utils/CloudProviderUtilsTest.java index <HASH>..<HASH> 100644 --- a/simulator/src/test/java/com/hazelcast/simulator/utils/CloudProviderUtilsTest.java +++ b/simulator/src/test/java/com/hazelcast/simulator/utils/CloudProviderUtilsTest.java @@ -1,5 +1,6 @@ package com.hazelcast.simulator.utils; +import com.hazelcast.simulator.common.SimulatorProperties; import org.junit.Test; import static com.hazelcast.simulator.utils.CloudProviderUtils.PROVIDER_EC2; @@ -8,6 +9,8 @@ import static com.hazelcast.simulator.utils.CloudProviderUtils.PROVIDER_STATIC; import static com.hazelcast.simulator.utils.ReflectionUtils.invokePrivateConstructor; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; +import static org.mockito.Mockito.mock; +import static 
org.mockito.Mockito.when; public class CloudProviderUtilsTest { @@ -27,6 +30,22 @@ public class CloudProviderUtilsTest { } @Test + public void testIsStatic_fromProperties_true() throws Exception { + SimulatorProperties properties = mock(SimulatorProperties.class); + when(properties.getCloudProvider()).thenReturn(PROVIDER_STATIC); + + assertTrue(CloudProviderUtils.isStatic(properties)); + } + + @Test + public void testIsStatic_fromProperties_false() throws Exception { + SimulatorProperties properties = mock(SimulatorProperties.class); + when(properties.getCloudProvider()).thenReturn(PROVIDER_GCE); + + assertFalse(CloudProviderUtils.isStatic(properties)); + } + + @Test public void testIsEC2_true() throws Exception { assertTrue(CloudProviderUtils.isEC2(PROVIDER_EC2)); }
Fixed bug in static setup when EC2 identity files are not present.
hazelcast_hazelcast-simulator
train
40812cab9ef922177eafcb17187d4f1dfa8f3d9d
diff --git a/openid/extensions/sreg.py b/openid/extensions/sreg.py index <HASH>..<HASH> 100644 --- a/openid/extensions/sreg.py +++ b/openid/extensions/sreg.py @@ -51,6 +51,8 @@ __all__ = [ 'SRegResponse', 'data_fields', 'ns_uri', + 'ns_uri_1_0', + 'ns_uri_1_1', 'supportsSReg', ]
[project @ Add sreg <I> and <I> URI variables to __all__]
necaris_python3-openid
train
6cbe0e60966d77037e691a8e4de8a05e3a77edad
diff --git a/superset/connectors/druid/models.py b/superset/connectors/druid/models.py index <HASH>..<HASH> 100644 --- a/superset/connectors/druid/models.py +++ b/superset/connectors/druid/models.py @@ -1018,7 +1018,7 @@ class DruidDatasource(Model, BaseDatasource): del qry['dimensions'] qry['metric'] = list(qry['aggregations'].keys())[0] client.topn(**qry) - elif len(groupby) > 1 or having_filters or not order_desc: + else: # If grouping on multiple fields or using a having filter # we have to force a groupby query if timeseries_limit and is_timeseries:
Fixed branching condition with dimension spec (#<I>)
apache_incubator-superset
train
ead1c32636a7e00cff75f6664deed497dfec5a55
diff --git a/mantrid/loadbalancer.py b/mantrid/loadbalancer.py index <HASH>..<HASH> 100644 --- a/mantrid/loadbalancer.py +++ b/mantrid/loadbalancer.py @@ -29,7 +29,7 @@ class Balancer(object): "spin": Spin, } - def __init__(self, listen_ports, management_port, state_file): + def __init__(self, listen_ports, management_port, state_file, uid=None, gid=65535): """ Constructor. @@ -42,6 +42,8 @@ class Balancer(object): self.listen_ports = listen_ports self.management_port = management_port self.state_file = state_file + self.uid = uid + self.gid = gid @classmethod def main(cls): @@ -61,7 +63,13 @@ class Balancer(object): resource.setrlimit(resource.RLIMIT_NOFILE, (cls.nofile, cls.nofile)) except (ValueError, resource.error): logging.warning("Cannot raise resource limits (run as root/change ulimits)") - balancer = cls({80: False}, 8042, "/tmp/mantrid.state") + balancer = cls( + {80: False}, + 8042, + "/tmp/mantrid.state", + 4321, + 4321, + ) balancer.run() def load(self): @@ -90,12 +98,51 @@ class Balancer(object): # First, initialise the process self.load() self.running = True + # Try to ensure the state file is readable + state_dir = os.path.dirname(self.state_file) + if not os.path.isdir(state_dir): + os.makedirs(state_dir) + if self.uid is not None: + try: + os.chown(state_dir, self.uid, -1) + except OSError: + pass + try: + os.chown(self.state_file, self.uid, -1) + except OSError: + pass # Then, launch the socket loops pool = eventlet.GreenPile(len(self.listen_ports) + 2) pool.spawn(self.save_loop) pool.spawn(self.management_loop, self.management_port) for port, internal in self.listen_ports.items(): pool.spawn(self.listen_loop, port, internal) + # Give the other threads a chance to open their listening sockets + eventlet.sleep(0.5) + # Drop to the lesser UID/GIDs, if supplied + if self.gid: + try: + os.setegid(self.gid) + os.setgid(self.gid) + except OSError: + logging.error("Cannot change to GID %i (probably not running as root)" % self.gid) + else: + 
logging.info("Dropped to GID %i" % self.gid) + if self.uid: + try: + os.seteuid(0) + os.setuid(self.uid) + os.seteuid(self.uid) + except OSError: + logging.error("Cannot change to UID %i (probably not running as root)" % self.uid) + else: + logging.info("Dropped to UID %i" % self.uid) + # Ensure we can save to the state file, or die hard. + try: + open(self.state_file, "a").close() + except (OSError, IOError): + logging.critical("Cannot write to state file %s" % self.state_file) + sys.exit(1) # Wait for one to exit, or for a clean/forced shutdown try: pool.next() @@ -131,6 +178,9 @@ class Balancer(object): Accepts management requests. """ sock = eventlet.listen(("::", port), socket.AF_INET6) + # Sleep to ensure we've dropped privileges by the time we start serving + eventlet.sleep(0.5) + # Actually serve management logging.info("Listening for management on port %i" % port) management_app = ManagementApp(self) with open("/dev/null", "w") as log_dest: @@ -156,6 +206,9 @@ class Balancer(object): logging.critical("Cannot listen on port %s (you must launch as root)" % port) return raise + # Sleep to ensure we've dropped privileges by the time we start serving + eventlet.sleep(0.5) + # Start serving logging.info("Listening for requests on port %i" % port) eventlet.serve( sock,
Implement dropping to a non-root UID/GID after startup for security reasons
epio_mantrid
train
01eadd10f75118d01688d990dc6c2f7f27786819
diff --git a/hibernate-ogm-core/src/test/java/org/hibernate/ogm/test/id/CompositeIdTest.java b/hibernate-ogm-core/src/test/java/org/hibernate/ogm/test/id/CompositeIdTest.java index <HASH>..<HASH> 100644 --- a/hibernate-ogm-core/src/test/java/org/hibernate/ogm/test/id/CompositeIdTest.java +++ b/hibernate-ogm-core/src/test/java/org/hibernate/ogm/test/id/CompositeIdTest.java @@ -95,6 +95,8 @@ public class CompositeIdTest extends JpaTestCase { assertThat( news.getNewsId().getAuthor() ).isEqualTo( author ); assertThat( news.getNewsId().getTitle() ).isEqualTo( titleOGM ); assertThat( news.getLabels().size() ).isEqualTo( newsOgmLabels.size() ); + em.remove( news ); + assertThat( em.find( News.class, newsOgmID ) ).isNull(); em.clear(); news = em.find( News.class, newsAboutJugID ); @@ -103,6 +105,8 @@ public class CompositeIdTest extends JpaTestCase { assertThat( news.getNewsId().getAuthor() ).isEqualTo( author ); assertThat( news.getNewsId().getTitle() ).isEqualTo( titleAboutJUG ); assertThat( news.getLabels().size() ).isEqualTo( newsAboutJugLabels.size() ); + em.remove( news ); + assertThat( em.find( News.class, newsAboutJugID ) ).isNull(); em.clear(); news = em.find( News.class, newsCountJugID ); @@ -111,6 +115,8 @@ public class CompositeIdTest extends JpaTestCase { assertThat( news.getNewsId().getAuthor() ).isEqualTo( author ); assertThat( news.getNewsId().getTitle() ).isEqualTo( titleCountJUG ); assertThat( news.getLabels().size() ).isEqualTo( newsCountJugLabels.size() ); + em.remove( news ); + assertThat( em.find( News.class, newsCountJugID ) ).isNull(); } finally { commitOrRollback( operationSuccessful );
OGM-<I> remove persisted object and check if they have been deleted
hibernate_hibernate-ogm
train
a8b29c41cf99f3fa28a60cd95a72dfb699cb6bfd
diff --git a/rejected/mcp.py b/rejected/mcp.py index <HASH>..<HASH> 100644 --- a/rejected/mcp.py +++ b/rejected/mcp.py @@ -20,8 +20,7 @@ LOGGER = logging.getLogger(__name__) class MasterControlProgram(common.State): """Master Control Program keeps track of and manages consumer processes.""" - _MIN_CONSUMERS = 1 - _MAX_CONSUMERS = 2 + _DEFAULT_CONSUMER_QTY = 1 _MAX_SHUTDOWN_WAIT = 10 _POLL_INTERVAL = 60.0 _POLL_RESULTS_INTERVAL = 3.0 @@ -30,7 +29,8 @@ class MasterControlProgram(common.State): def __init__(self, config, consumer=None, profile=None, quantity=None): """Initialize the Master Control Program - :param dict config: The full content from the YAML config file + :param config: The full content from the YAML config file + :type config: helper.config.Config :param str consumer: If specified, only run processes for this consumer :param str profile: Optional profile output directory to enable profiling @@ -77,20 +77,20 @@ class MasterControlProgram(common.State): if int(child.pid) == os.getpid(): continue try: - process = psutil.Process(child.pid) + proc = psutil.Process(child.pid) except psutil.NoSuchProcess: dead_processes.append((consumer, name)) continue - if self.is_a_dead_or_zombie_process(process): + if self.is_a_dead_or_zombie_process(proc): dead_processes.append((consumer, name)) else: active_processes.append(child) if dead_processes: LOGGER.debug('Removing %i dead process(es)', len(dead_processes)) - for process in dead_processes: - self.remove_consumer_process(*process) + for proc in dead_processes: + self.remove_consumer_process(*proc) return active_processes def calculate_stats(self, data): @@ -186,13 +186,13 @@ class MasterControlProgram(common.State): for connection in configuration['connections']: connections[connection] = list() return {'connections': connections, - 'min': configuration.get('min', self._MIN_CONSUMERS), - 'max': configuration.get('max', self._MAX_CONSUMERS), + 'qty': configuration.get('qty', self._DEFAULT_CONSUMER_QTY), 
'last_proc_num': 0, 'queue': configuration['queue'], 'processes': dict()} - def consumer_keyword(self, counts): + @staticmethod + def consumer_keyword(counts): """Return consumer or consumers depending on the process count. :param dict counts: The count dictionary to use process count @@ -223,7 +223,8 @@ class MasterControlProgram(common.State): """ return self._consumers[consumer]['processes'].get(name) - def is_a_dead_or_zombie_process(self, process): + @staticmethod + def is_a_dead_or_zombie_process(process): """Checks to see if the specified process is a zombie or dead. :param psutil.Process: The process to check @@ -269,7 +270,7 @@ class MasterControlProgram(common.State): def log_stats(self): """Output the stats to the LOGGER.""" - LOGGER.info('%i total %s have processed %i messages with %i ' + LOGGER.info('%i total %s have processed %i messages with %i ' 'errors, waiting %.2f seconds and have spent %.2f seconds ' 'processing messages with an overall velocity of %.2f ' 'messages per second.', @@ -392,13 +393,13 @@ class MasterControlProgram(common.State): call ourselves in _POLL_RESULTS_INTERVAL. 
""" - LOGGER.info('Checking for poll results, %i items', - self._stats_queue.qsize()) - while self._stats_queue.qsize(): + LOGGER.debug('Checking for poll results') + while True: try: stats = self._stats_queue.get(False) except Queue.Empty: LOGGER.debug('Stats queue is empty') + break self._poll_data['processes'].remove(stats['name']) self.collect_results(stats) @@ -448,7 +449,7 @@ class MasterControlProgram(common.State): :rtype: int """ - return self._consumers[name]['min'] - self.process_count(name, + return self._consumers[name]['qty'] - self.process_count(name, connection) def remove_consumer_process(self, consumer, name): @@ -502,13 +503,15 @@ class MasterControlProgram(common.State): # Loop for the lifetime of the app, pausing for a signal to pop up while self.is_running and self.total_process_count: - self.set_state(self.STATE_SLEEPING) + if self._state != self.STATE_SLEEPING: + self.set_state(self.STATE_SLEEPING) signal.pause() # Note we're exiting run LOGGER.info('Exiting Master Control Program') - def set_process_name(self): + @staticmethod + def set_process_name(): """Set the process name for the top level process so that it shows up in logs in a more trackable fasion. @@ -585,12 +588,12 @@ class MasterControlProgram(common.State): self._consumers[name] = self.consumer_dict(config) if self._quantity: - self._consumers[name]['min'] = self._quantity + self._consumers[name]['qty'] = self._quantity # Iterate through the connections to create new consumer processes for connection in self._consumers[name]['connections']: self.start_processes(name, connection, - self._consumers[name]['min']) + self._consumers[name]['qty']) def stop_processes(self): """Iterate through all of the consumer processes shutting them down."""
Change configuration names - Remove deprecated min/max for consumer qty - Minor method signature cleanup
gmr_rejected
train
b99c317a223a31274cdef5291cf1bd2a5fef69ee
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -20,7 +20,7 @@ var noCaseSyntax = /NC/ , forbiddenSyntax = /F/ , goneSyntax = /G/ , typeSyntax = /T=([\w|\/]+,?)/ - , hostSyntax = /H=([\w|\d|\:|\.]),?/ + , hostSyntax = /H=([^,]+)/ , flagSyntax = /\[(.*)\]$/ , partsSyntax = /\s+|\t+/g , httpsSyntax = /^https/ diff --git a/test/test.js b/test/test.js index <HASH>..<HASH> 100644 --- a/test/test.js +++ b/test/test.js @@ -342,6 +342,24 @@ describe('Connect-modrewrite', function() { expect(req.url).to.equal('/b'); }); + it('should be able to parse with other flags', function() { + var middleware = modRewrite(['/a /b [H=(.+)\\.webview\\..*, L]']); + var req = { + connection : { encrypted : false }, + header : function() {}, + headers : { host : 'ios.webview.test.com' }, + url : '/a' + }; + var res = { + setHeader : function() {}, + writeHead : sinon.spy(), + end : sinon.spy() + }; + var next = function() {}; + middleware(req, res, next); + expect(req.url).to.equal('/b'); + }); + it('should not jump to the next rule if the host doesn\'t match', function() { var middleware = modRewrite(['/a /b [H=(.+)\\.webview\\..*]']); var req = {
Fixed multiple flags support in host flag
tinganho_connect-modrewrite
train
767b1bad27cdaf42fd991e399a202dd57184cda4
diff --git a/src/wyil/io/WyilFileWriter.java b/src/wyil/io/WyilFileWriter.java index <HASH>..<HASH> 100644 --- a/src/wyil/io/WyilFileWriter.java +++ b/src/wyil/io/WyilFileWriter.java @@ -222,8 +222,12 @@ public class WyilFileWriter implements Transform { children[k+1] = addStringItem(field); k = k + 2; } - } else if(t instanceof Type.FunctionOrMethod) { + } else if(t instanceof Type.Function) { kind = TYPE_Kind.FUNCTION; + // TODO: + } else if(t instanceof Type.Method) { + kind = TYPE_Kind.METHOD; + // TODO: } else if(t instanceof Type.Reference) { Type.Reference l = (Type.Reference) t; kind = TYPE_Kind.REFERENCE; @@ -235,21 +239,30 @@ public class WyilFileWriter implements Transform { children = new int[1]; children[0] = addTypeItem(l.element()); } else if(t instanceof Type.EffectiveList) { - - } else if(t instanceof Type.EffectiveSet) { - - } else if(t instanceof Type.EffectiveMap) { - - } else if(t instanceof Type.EffectiveTuple) { - - } else if(t instanceof Type.EffectiveRecord) { - - } else if(t instanceof Type.EffectiveIndexible) { - - } else if(t instanceof Type.EffectiveCollection) { - - } else if(t instanceof Type.Union) { - + kind = TYPE_Kind.EFFECTIVE_LIST; + } else if(t instanceof Type.Union) { + if(t instanceof Type.EffectiveSet) { + kind = TYPE_Kind.EFFECTIVE_SET; + } else if(t instanceof Type.EffectiveMap) { + kind = TYPE_Kind.EFFECTIVE_MAP; + } else if(t instanceof Type.EffectiveTuple) { + kind = TYPE_Kind.EFFECTIVE_TUPLE; + } else if(t instanceof Type.EffectiveRecord) { + kind = TYPE_Kind.EFFECTIVE_RECORD; + } else if(t instanceof Type.EffectiveIndexible) { + kind = TYPE_Kind.EFFECTIVE_INDEXIBLE; + } else if(t instanceof Type.EffectiveCollection) { + kind = TYPE_Kind.EFFECTIVE_COLLECTION; + } else { + kind = TYPE_Kind.UNION; + } + Type.Union u = (Type.Union) t; + HashSet<Type> bounds = u.bounds(); + children = new int[bounds.size()]; + int k = 0; + for(Type bound : bounds) { + children[k++] = addTypeItem(bound); + } } else { throw new 
IllegalArgumentException("unknown type encountered"); }
Ok, well it compiles now.
Whiley_WhileyCompiler
train
633cc68220a96afe7d94b70b0565cf3313fef008
diff --git a/spambl.py b/spambl.py index <HASH>..<HASH> 100644 --- a/spambl.py +++ b/spambl.py @@ -2,6 +2,7 @@ # -*- coding: utf-8 -*- from sys import exc_info +from dns.resolver import query, NXDOMAIN class SpamBLError(Exception): ''' Base exception class for spambl module ''' @@ -87,6 +88,24 @@ class DNSBL(object): hostname = str(hostname) yield hostname, hostname.rstrip('.') + + def _query_for(self, host_collection): + ''' Get hosts that are included both in this blocklist and in host_collection + + :param host_collection: a container with valid host values + :returns: an item listed on this DNSBL, as an instance of DNSBLItem + ''' + for host, prefix in self._get_host_and_query_prefix(host_collection): + try: + response = query(prefix+'.'+self._query_suffix) + + except NXDOMAIN: + pass + + else: + last_octet = response[0].to_text().split('.')[-1] + yield DNSBLItem(host, self, last_octet) + if __name__ == '__main__':
Add _query_for method to DNSBL class
piotr-rusin_spam-lists
train
544fd2cb75195281c5fa21e9cc83f166f730c097
diff --git a/tests/test_dict.py b/tests/test_dict.py index <HASH>..<HASH> 100644 --- a/tests/test_dict.py +++ b/tests/test_dict.py @@ -2,8 +2,8 @@ from collections.abc import Mapping, Sequence import pytest -from confidence.exceptions import ConfigurationError -from confidence.models import _NoDefault, Configuration +from confidence import Configuration, ConfigurationError +from confidence.models import _NoDefault def test_empty(): diff --git a/tests/test_io.py b/tests/test_io.py index <HASH>..<HASH> 100644 --- a/tests/test_io.py +++ b/tests/test_io.py @@ -3,9 +3,8 @@ from os import path import pytest from unittest.mock import call, patch -from confidence.io import DEFAULT_LOAD_ORDER, load, load_name, loaders, loadf, loads, Locality +from confidence import Configuration, DEFAULT_LOAD_ORDER, load, load_name, loaders, loadf, loads, Locality, NotConfigured from confidence.io import read_envvar_file, read_envvars, read_xdg_config_dirs, read_xdg_config_home -from confidence.models import Configuration, NotConfigured test_files = path.join(path.dirname(__file__), 'files') diff --git a/tests/test_loaders.py b/tests/test_loaders.py index <HASH>..<HASH> 100644 --- a/tests/test_loaders.py +++ b/tests/test_loaders.py @@ -1,6 +1,7 @@ from itertools import chain, groupby -from confidence.io import _LOADERS, DEFAULT_LOAD_ORDER, loaders, Locality +from confidence import DEFAULT_LOAD_ORDER, loaders, Locality +from confidence.io import _LOADERS def test_default_load_order_all_loaders():
Update imports in tests to prefer importing from main module
HolmesNL_confidence
train
0d69aca083f5d3e75ad3d47c2d94ea95be380400
diff --git a/tests/event-data/updateEvent.js b/tests/event-data/updateEvent.js index <HASH>..<HASH> 100644 --- a/tests/event-data/updateEvent.js +++ b/tests/event-data/updateEvent.js @@ -72,4 +72,45 @@ describe('updateEvent', function() { }); }); }); + + describe('when changing an event from timed to all-day', function() { + pushOptions({ + defaultView: 'month', + now: '2017-07-14', + events: [ + { id: '2', start: '2017-07-14T08:00:00Z', end: '2017-07-14T12:00:00Z' } + ] + }); + + it('reflects the change on the event object', function(done) { + var allRenderCnt = 0; + + initCalendar({ + eventAfterAllRender: function() { + var eventObj; + + allRenderCnt++; + + if (allRenderCnt === 1) { + eventObj = currentCalendar.clientEvents('2')[0]; + + expect(eventObj.allDay).toBe(false); + + eventObj.allDay = true; + eventObj.start = '2017-07-14'; + eventObj.end = '2017-07-15'; + currentCalendar.updateEvent(eventObj); + + eventObj = currentCalendar.clientEvents('2')[0]; + + expect(eventObj.allDay).toBe(true); + expect(eventObj.start.format()).toBe('2017-07-14'); + expect(eventObj.end.format()).toBe('2017-07-15'); + + done(); + } + } + }); + }); + }); });
test for changing allDay with updateEvent
fullcalendar_fullcalendar
train
554d48e8331636b21c94cf89ba31f428123836e0
diff --git a/h2o-core/src/main/java/water/fvec/Chunk.java b/h2o-core/src/main/java/water/fvec/Chunk.java index <HASH>..<HASH> 100644 --- a/h2o-core/src/main/java/water/fvec/Chunk.java +++ b/h2o-core/src/main/java/water/fvec/Chunk.java @@ -142,6 +142,11 @@ public abstract class Chunk extends Iced implements Cloneable { /** Owning Vec */ public Vec vec() { return _vec; } + /** Set the owning Vec */ + public void setVec(Vec vec) { _vec = vec; } + + /** Set the start */ + public void setStart(long start) { _start = start; } /** The Big Data. Frequently set in the subclasses, but not otherwise a publically writable field. */ byte[] _mem; /** Short-cut to the embedded big-data memory. Generally not useful for @@ -149,10 +154,15 @@ public abstract class Chunk extends Iced implements Cloneable { * pointer to this array defeats the user-mode spill-to-disk. */ public byte[] getBytes() { return _mem; } + public void setBytes(byte[] mem) { _mem = mem; } + /** Used by a ParseExceptionTest to break the Chunk invariants and trigger an * NPE. Not intended for public use. */ public final void crushBytes() { _mem=null; } + /** Used by rbind to flush the chk2 */ + public final void flushChk2() { _chk2 = null; setWrite(); } + /** Load a {@code long} value using absolute row numbers. Floating point * values are silently rounded to a long. Throws if the value is missing. * diff --git a/h2o-core/src/main/java/water/fvec/Vec.java b/h2o-core/src/main/java/water/fvec/Vec.java index <HASH>..<HASH> 100644 --- a/h2o-core/src/main/java/water/fvec/Vec.java +++ b/h2o-core/src/main/java/water/fvec/Vec.java @@ -230,7 +230,7 @@ public class Vec extends Keyed { /** Main default constructor; the caller understands Chunk layout (via the * {@code espc} array), plus enum/factor the {@code domain} (or null for * non-enums), and the Vec type. 
*/ - Vec( Key key, long espc[], String[] domain, byte type ) { + public Vec( Key key, long espc[], String[] domain, byte type ) { super(key); assert key._kb[0]==Key.VEC; assert domain==null || type==T_ENUM; @@ -277,6 +277,8 @@ public class Vec extends Keyed { boolean writable() { return true; } /** Get the _espc long[]. */ public long[] get_espc() { return _espc; } + /** Get the column type. */ + public byte get_type() { return _type; } // ======= Create zero/constant Vecs ====== @@ -617,7 +619,7 @@ public class Vec extends Keyed { * shift-and-add math. For variable-sized chunks this is a binary search, * with a sane API (JDK has an insane API). Overridden by subclasses that * compute chunks in an alternative way, such as file-backed Vecs. */ - int elem2ChunkIdx( long i ) { + public int elem2ChunkIdx( long i ) { if( !(0 <= i && i < length()) ) throw new ArrayIndexOutOfBoundsException("0 <= "+i+" < "+length()); int lo=0, hi = nChunks(); while( lo < hi-1 ) { diff --git a/h2o-core/src/main/java/water/util/ArrayUtils.java b/h2o-core/src/main/java/water/util/ArrayUtils.java index <HASH>..<HASH> 100644 --- a/h2o-core/src/main/java/water/util/ArrayUtils.java +++ b/h2o-core/src/main/java/water/util/ArrayUtils.java @@ -127,6 +127,10 @@ public class ArrayUtils { for(long n: nums) sum+=n; return sum/nums.length; } + public static long[] add(long[] nums, long a) { + for (int i=0;i<nums.length;i++) nums[i] += a; + return nums; + } public static float[] div(float[] nums, int n) { for (int i=0; i<nums.length; i++) nums[i] /= n; return nums; @@ -449,6 +453,11 @@ public class ArrayUtils { return Arrays.copyOf(r, i); } + public static long[] join(long[] a, long[] b) { + long[] res = Arrays.copyOf(a, a.length+b.length); + System.arraycopy(b, 0, res, a.length, b.length); + return res; + } public static float [] join(float[] a, float[] b) { float[] res = Arrays.copyOf(a, a.length+b.length); System.arraycopy(b, 0, res, a.length, b.length);
chunk2elem public and create some public sets/accesses for rbind
h2oai_h2o-3
train
6976502eb2e3c484364651d8dd07810c8b155beb
diff --git a/masonite/auth/Auth.py b/masonite/auth/Auth.py index <HASH>..<HASH> 100644 --- a/masonite/auth/Auth.py +++ b/masonite/auth/Auth.py @@ -100,16 +100,16 @@ class Auth: self.request.delete_cookie('token') return self - def login_by_id(self, id): + def login_by_id(self, user_id): """Login a user by the user ID. Arguments: - id {string|int} -- The ID of the user model record. + user_id {string|int} -- The ID of the user model record. Returns: object|False -- Returns the current authenticated user object or False or None if there is none. """ - model = self.auth_model.find(id) + model = self.auth_model.find(user_id) if model: if not self._once:
Fix 🔨🔧 usage of built in id args
MasoniteFramework_masonite
train
38602b32d362758ee0edda854ae9d8a08a7347e2
diff --git a/src/React/Widgets/AnnotationEditorWidget/ManyScore/index.js b/src/React/Widgets/AnnotationEditorWidget/ManyScore/index.js index <HASH>..<HASH> 100644 --- a/src/React/Widgets/AnnotationEditorWidget/ManyScore/index.js +++ b/src/React/Widgets/AnnotationEditorWidget/ManyScore/index.js @@ -71,7 +71,7 @@ export default function manyScoreAnnotationEditorWidget(props) { </section> <section className={style.lineContainerSpaceBetween}> - <CollapsibleWidget title="Rationale" open={false}> + <CollapsibleWidget title="Rationale" open={props.rationaleOpen}> <textarea className={style.textBox} name="rationale" @@ -90,9 +90,13 @@ manyScoreAnnotationEditorWidget.propTypes = { scores: React.PropTypes.array, ranges: React.PropTypes.object, getLegend: React.PropTypes.func, + rationaleOpen: React.PropTypes.bool, onSelectionChange: React.PropTypes.func, onAnnotationChange: React.PropTypes.func, onScoreChange: React.PropTypes.func, }; +manyScoreAnnotationEditorWidget.defaultProps = { + rationaleOpen: false, +}; diff --git a/src/React/Widgets/AnnotationEditorWidget/OneScore/index.js b/src/React/Widgets/AnnotationEditorWidget/OneScore/index.js index <HASH>..<HASH> 100644 --- a/src/React/Widgets/AnnotationEditorWidget/OneScore/index.js +++ b/src/React/Widgets/AnnotationEditorWidget/OneScore/index.js @@ -55,7 +55,7 @@ export default function oneScoreAnnotationEditorWidget(props) { </section> <section className={style.lineContainerSpaceBetween}> - <CollapsibleWidget title="Rationale" open={false}> + <CollapsibleWidget title="Rationale" open={props.rationaleOpen}> <textarea className={style.textBox} name="rationale" @@ -74,8 +74,13 @@ oneScoreAnnotationEditorWidget.propTypes = { scores: React.PropTypes.array, ranges: React.PropTypes.object, getLegend: React.PropTypes.func, + rationaleOpen: React.PropTypes.bool, onSelectionChange: React.PropTypes.func, onAnnotationChange: React.PropTypes.func, onScoreChange: React.PropTypes.func, }; + +oneScoreAnnotationEditorWidget.defaultProps = 
{ + rationaleOpen: false, +}; diff --git a/src/React/Widgets/AnnotationEditorWidget/example/index.js b/src/React/Widgets/AnnotationEditorWidget/example/index.js index <HASH>..<HASH> 100644 --- a/src/React/Widgets/AnnotationEditorWidget/example/index.js +++ b/src/React/Widgets/AnnotationEditorWidget/example/index.js @@ -50,6 +50,7 @@ function render() { scores={scores} annotation={annotation} getLegend={legendService.getLegend} + // rationaleOpen={true} onChange={(newAnnotation, save) => { annotations[idx] = newAnnotation; if (save) { diff --git a/src/React/Widgets/AnnotationEditorWidget/index.js b/src/React/Widgets/AnnotationEditorWidget/index.js index <HASH>..<HASH> 100644 --- a/src/React/Widgets/AnnotationEditorWidget/index.js +++ b/src/React/Widgets/AnnotationEditorWidget/index.js @@ -92,8 +92,10 @@ annotationEditorWidget.propTypes = { ranges: React.PropTypes.object, onChange: React.PropTypes.func, getLegend: React.PropTypes.func, + rationaleOpen: React.PropTypes.bool, }; annotationEditorWidget.defaultProps = { onChange(annotation, isEditDone) {}, + rationaleOpen: false, }; diff --git a/src/React/Widgets/AnnotationStoreEditorWidget/index.js b/src/React/Widgets/AnnotationStoreEditorWidget/index.js index <HASH>..<HASH> 100644 --- a/src/React/Widgets/AnnotationStoreEditorWidget/index.js +++ b/src/React/Widgets/AnnotationStoreEditorWidget/index.js @@ -58,6 +58,7 @@ export default function annotationStoreEditorWidget(props) { ranges={props.ranges} getLegend={props.getLegend} onChange={props.onAnnotationChange} + rationaleOpen={props.rationaleOpen} /> </section> </div> @@ -79,6 +80,7 @@ annotationStoreEditorWidget.propTypes = { scores: React.PropTypes.array, ranges: React.PropTypes.object, getLegend: React.PropTypes.func, + rationaleOpen: React.PropTypes.bool, onAnnotationChange: React.PropTypes.func, onChange: React.PropTypes.func, @@ -87,4 +89,5 @@ annotationStoreEditorWidget.propTypes = { annotationStoreEditorWidget.defaultProps = { onAnnotationChange(annotation, 
isEditing) {}, onChange(action, id, annotation) {}, + rationaleOpen: false, };
fix(AnnotationEditorWidget): Allow rationale to be opened Add prop to allow the rationale field to be open or closed. Default is still closed.
Kitware_paraviewweb
train
8c76693a247619450f3c7e44b61dc4e5b158d3df
diff --git a/boundary/api_call.py b/boundary/api_call.py index <HASH>..<HASH> 100644 --- a/boundary/api_call.py +++ b/boundary/api_call.py @@ -45,6 +45,7 @@ class ApiCall(object): self._api_host = "premium-api.boundary.com" self._email = None self._api_token = None + self._curl = False # All member variables related to REST CALL self._scheme = "https" @@ -95,6 +96,7 @@ class ApiCall(object): @headers.setter def headers(self, headers): self._headers = headers + # # method # @@ -214,6 +216,28 @@ class ApiCall(object): def form_url(self): return "{0}://{1}/{2}{3}".format(self._scheme, self._api_host, self._path, self._get_url_parameters()) + def _curl_output(self): + + headers = "" + if self._headers is not None: + for key in self._headers: + headers = headers + ' -H "{0}: {1}"'.format(key, self._headers[key]) + + data = None + if self._data is not None: + data = " -d '{0}'".format(self._data) + else: + data = '' + + url = ' "{0}"'.format(self.form_url()) + + print('curl -X {0} -u "{1}:{2}"{3}{4}{5}'.format(self._method, + self._email, + self._api_token, + headers, + data, + url)) + def _call_api(self): """ Make an API call to get the metric definition @@ -253,4 +277,3 @@ class ApiCall(object): if self._api_result.status_code == requests.codes.ok: result = json.loads(self._api_result.text) return result - diff --git a/boundary/api_cli.py b/boundary/api_cli.py index <HASH>..<HASH> 100755 --- a/boundary/api_cli.py +++ b/boundary/api_cli.py @@ -85,6 +85,8 @@ class ApiCli(ApiCall): metavar="api_token", help='API token for given e-mail that has access to the {0} account'.format( self.product_name)) + self.parser.add_argument('-z', '--curl', dest='curl', required=False, action='store_true', default=False, + help='Output the corresponding curl command line and exit') def _parse_args(self): """ @@ -118,6 +120,7 @@ class ApiCli(ApiCall): self._email = self.args.email if self.args.api_token is not None: self._api_token = self.args.api_token + self._curl = self.args.curl 
logging.debug("apihost: {0}".format(self._api_host)) logging.debug("email: {0}".format(self._email)) @@ -183,8 +186,11 @@ class ApiCli(ApiCall): self.get_api_parameters() if self._validate_arguments(): - self._call_api() - self._handle_results() + if self._curl: + self._curl_output() + else: + self._call_api() + self._handle_results() else: print(self._message)
Add option to output commands action as a curl command
boundary_pulse-api-cli
train
d897956362fbb167715c8bf31b1489adc49654cc
diff --git a/src/Symfony/Component/Filesystem/Filesystem.php b/src/Symfony/Component/Filesystem/Filesystem.php index <HASH>..<HASH> 100644 --- a/src/Symfony/Component/Filesystem/Filesystem.php +++ b/src/Symfony/Component/Filesystem/Filesystem.php @@ -301,10 +301,15 @@ class Filesystem */ public function symlink($originDir, $targetDir, $copyOnWindows = false) { - if ($copyOnWindows && !function_exists('symlink')) { - $this->mirror($originDir, $targetDir); + if ('\\' === DIRECTORY_SEPARATOR) { + $originDir = strtr($originDir, '/', '\\'); + $targetDir = strtr($targetDir, '/', '\\'); + + if ($copyOnWindows) { + $this->mirror($originDir, $targetDir); - return; + return; + } } $this->mkdir(dirname($targetDir));
Ensure backend slashes for symlinks on Windows systems Resolves: #<I>
symfony_symfony
train
54d083e649b7904cf19a03f233e7d9634a6cf658
diff --git a/domain/src/test/java/org/jboss/as/model/base/ProfileIncludeElementTestBase.java b/domain/src/test/java/org/jboss/as/model/base/ProfileIncludeElementTestBase.java index <HASH>..<HASH> 100644 --- a/domain/src/test/java/org/jboss/as/model/base/ProfileIncludeElementTestBase.java +++ b/domain/src/test/java/org/jboss/as/model/base/ProfileIncludeElementTestBase.java @@ -124,7 +124,6 @@ public abstract class ProfileIncludeElementTestBase extends DomainModelElementTe byte[] bytes = serialize(testee); ProfileIncludeElement testee1 = deserialize(bytes, ProfileIncludeElement.class); - assertEquals(testee.elementHash(), testee1.elementHash()); assertEquals(testee.getProfile(), testee1.getProfile()); } diff --git a/domain/src/test/java/org/jboss/as/model/base/util/MockAnyElement.java b/domain/src/test/java/org/jboss/as/model/base/util/MockAnyElement.java index <HASH>..<HASH> 100644 --- a/domain/src/test/java/org/jboss/as/model/base/util/MockAnyElement.java +++ b/domain/src/test/java/org/jboss/as/model/base/util/MockAnyElement.java @@ -6,8 +6,10 @@ package org.jboss.as.model.base.util; import javax.xml.namespace.QName; import javax.xml.stream.XMLStreamException; +import java.util.List; import org.jboss.as.model.AbstractModelElement; import org.jboss.as.model.AbstractSubsystemElement; +import org.jboss.as.model.AbstractSubsystemUpdate; import org.jboss.as.model.ParseUtils; import org.jboss.staxmapper.XMLExtendedStreamReader; import org.jboss.staxmapper.XMLExtendedStreamWriter; @@ -46,7 +48,7 @@ public class MockAnyElement extends AbstractSubsystemElement<MockAnyElement> { * @throws XMLStreamException if an error occurs */ public MockAnyElement(XMLExtendedStreamReader reader) throws XMLStreamException { - super(reader); + super(NAMESPACE); ParseUtils.requireNoAttributes(reader); ParseUtils.requireNoContent(reader); } @@ -57,12 +59,14 @@ public class MockAnyElement extends AbstractSubsystemElement<MockAnyElement> { } @Override - private long elementHash() { - return 19; 
- } - - @Override public void writeContent(XMLExtendedStreamWriter streamWriter) throws XMLStreamException { streamWriter.writeEndElement(); } + + protected void getClearingUpdates(final List<? super AbstractSubsystemUpdate<MockAnyElement, ?>> objects) { + } + + protected boolean isEmpty() { + return true; + } }
Last fixes to get domain compiling
wildfly_wildfly
train
817695305e73173921a9d1f2a6ef49af61e1e0cb
diff --git a/dvc/stage.py b/dvc/stage.py index <HASH>..<HASH> 100644 --- a/dvc/stage.py +++ b/dvc/stage.py @@ -162,7 +162,7 @@ class Stage(object): outs = [x.dumpd(self.cwd) for x in self.outs] ret = {} - if self.cmd: + if self.cmd != None: ret[Stage.PARAM_CMD] = self.cmd if len(deps):
stage: write cmd only if it is not None
iterative_dvc
train
5b8b4691e44ef15026c15d49a64916132677e9d1
diff --git a/visidata/aggregators.py b/visidata/aggregators.py index <HASH>..<HASH> 100644 --- a/visidata/aggregators.py +++ b/visidata/aggregators.py @@ -47,7 +47,7 @@ def aggregator(name, func, *args, type=None): return None return e - aggregators[name] = _defaggr(name, type, _func) + aggregators[name] = [_defaggr(name, type, _func)] ## specific aggregator implementations @@ -118,9 +118,7 @@ ColumnsSheet.columns += [ def addAggregators(cols, aggrnames): 'add aggregator for each aggrname to each of cols' for aggrname in aggrnames: - aggrs = aggregators.get(aggrname) - aggrs = aggrs if isinstance(aggrs, list) else [aggrs] - for aggr in aggrs: + for aggr in aggregators.get(aggrname): for c in cols: if not hasattr(c, 'aggregators'): c.aggregators = []
[aggregators] make aggregators value always a list
saulpw_visidata
train
d165d01922e08a9f671871bda5dcb5f614ef134a
diff --git a/src/map/tool/DrawTool.js b/src/map/tool/DrawTool.js index <HASH>..<HASH> 100644 --- a/src/map/tool/DrawTool.js +++ b/src/map/tool/DrawTool.js @@ -240,7 +240,7 @@ Z.DrawTool = Z.MapTool.extend(/** @lends maptalks.DrawTool.prototype */{ } //这一行代码取消注释后, 会造成dblclick无法响应, 可能是存在循环调用,造成浏览器无法正常响应事件 this._setLonlats(path); - + param['geometry'] = this.getMode() === 'polygon' ? path.length >= 3 ? new Z.Polygon(path) : new Z.LineString(path) : new Z.LineString(path); /** * drawvertex event. * @@ -276,7 +276,8 @@ Z.DrawTool = Z.MapTool.extend(/** @lends maptalks.DrawTool.prototype */{ } else { this._movingTail.setCoordinates(tailPath); } - param['geometry'] = this._geometry; + path = path.concat([coordinate]); + param['geometry'] = this.getMode() === 'polygon' ? path.length >= 3 ? new Z.Polygon(path) : new Z.LineString(path) : new Z.LineString(path); /** * mousemove event. * @@ -405,6 +406,8 @@ Z.DrawTool = Z.MapTool.extend(/** @lends maptalks.DrawTool.prototype */{ if (!this._isValidContainerPoint(current)) { return false; } var coordinate = this._containerPointToLonlat(current); genGeometry(coordinate); + param['geometry'] = this._geometry; + this._fireEvent('mousemove', param); return false; } var onMouseUp = function (_event) {
add geometry in mousemove event of drawtool
maptalks_maptalks.js
train
53bd3f3d0635c43d39d72e9d28e2be909635ef88
diff --git a/test/APITest.php b/test/APITest.php index <HASH>..<HASH> 100644 --- a/test/APITest.php +++ b/test/APITest.php @@ -5,6 +5,7 @@ */ require_once(dirname(__FILE__) . '/../lib/API.php'); +require_once(dirname(__FILE__) . '/../lib/Error.php'); require_once 'PHPUnit/Autoload.php'; class APITestCase extends PHPUnit_Framework_TestCase
Error class is included in PHPUnit test
sendwithus_sendwithus_php
train
58dc9bcd144a5a5d4afbad41f2454df77ad7023e
diff --git a/library/src/com/handmark/pulltorefresh/library/PullToRefreshAdapterViewBase.java b/library/src/com/handmark/pulltorefresh/library/PullToRefreshAdapterViewBase.java index <HASH>..<HASH> 100644 --- a/library/src/com/handmark/pulltorefresh/library/PullToRefreshAdapterViewBase.java +++ b/library/src/com/handmark/pulltorefresh/library/PullToRefreshAdapterViewBase.java @@ -27,8 +27,11 @@ import android.view.ViewParent; import android.widget.AbsListView; import android.widget.AbsListView.OnScrollListener; import android.widget.Adapter; +import android.widget.AdapterView; +import android.widget.AdapterView.OnItemClickListener; import android.widget.FrameLayout; import android.widget.LinearLayout; +import android.widget.ListAdapter; import com.handmark.pulltorefresh.library.internal.EmptyViewMethodAccessor; import com.handmark.pulltorefresh.library.internal.IndicatorLayout; @@ -123,8 +126,7 @@ public abstract class PullToRefreshAdapterViewBase<T extends AbsListView> extend mOnScrollListener.onScrollStateChanged(view, scrollState); } } - - + @Override protected void onScrollChanged(int l, int t, int oldl, int oldt) { super.onScrollChanged(l, t, oldl, oldt); @@ -134,6 +136,28 @@ public abstract class PullToRefreshAdapterViewBase<T extends AbsListView> extend } /** + * Pass-through method for {@link PullToRefreshBase#getRefreshableView() + * getRefreshableView()}.{@link AdapterView#setAdapter(ListAdapter) + * setAdapter(adapter)}. This is just for convenience! + * + * @param adapter - Adapter to set + */ + public void setAdapter(ListAdapter adapter) { + ((AdapterView<ListAdapter>) mRefreshableView).setAdapter(adapter); + } + + /** + * Pass-through method for {@link PullToRefreshBase#getRefreshableView() + * getRefreshableView()}.{@link AdapterView#setOnItemClickListener(OnItemClickListener) + * setOnItemClickListener(listener)}. This is just for convenience! 
+ * + * @param listener - OnItemClickListener to use + */ + public void setOnItemClickListener(OnItemClickListener listener) { + mRefreshableView.setOnItemClickListener(listener); + } + + /** * Sets the Empty View to be used by the Adapter View. * * We need it handle it ourselves so that we can Pull-to-Refresh when the @@ -174,7 +198,7 @@ public abstract class PullToRefreshAdapterViewBase<T extends AbsListView> extend mEmptyView = newEmptyView; } } - + public final void setScrollEmptyView(boolean doScroll) { mScrollEmptyView = doScroll; } @@ -336,7 +360,7 @@ public abstract class PullToRefreshAdapterViewBase<T extends AbsListView> extend Log.d(LOG_TAG, "isFirstItemVisible. Empty View."); } return true; - + } else if (mRefreshableView.getFirstVisiblePosition() == 0) { final View firstVisibleChild = mRefreshableView.getChildAt(0); if (firstVisibleChild != null) { diff --git a/sample/src/com/handmark/pulltorefresh/samples/PullToRefreshListActivity.java b/sample/src/com/handmark/pulltorefresh/samples/PullToRefreshListActivity.java index <HASH>..<HASH> 100755 --- a/sample/src/com/handmark/pulltorefresh/samples/PullToRefreshListActivity.java +++ b/sample/src/com/handmark/pulltorefresh/samples/PullToRefreshListActivity.java @@ -81,7 +81,8 @@ public final class PullToRefreshListActivity extends ListActivity { mAdapter = new ArrayAdapter<String>(this, android.R.layout.simple_list_item_1, mListItems); - // You can also just use setListAdapter(mAdapter) + // You can also just use setListAdapter(mAdapter) or + // mPullRefreshListView.setAdapter(mAdapter) actualListView.setAdapter(mAdapter); }
Add convenience passthrough for methods for setAdapter and setOnItemClickListener. Sick of getting GitHub issues for this.
chrisbanes_Android-PullToRefresh
train
87acb738412d1462ec7192b5fb33f6ffc4e84cd0
diff --git a/slither/slithir/convert.py b/slither/slithir/convert.py index <HASH>..<HASH> 100644 --- a/slither/slithir/convert.py +++ b/slither/slithir/convert.py @@ -511,10 +511,21 @@ def apply_ir_heuristics(irs, node): # irs = replace_calls(irs) irs = remove_unused(irs) + find_references_origin(irs) + reset_variable_number(irs) return irs +def find_references_origin(irs): + """ + Make lvalue of each Index, Member operation + points to the left variable + """ + for ir in irs: + if isinstance(ir, (Index, Member)): + ir.lvalue.points_to = ir.variable_left + def reset_variable_number(result): """ Reset the number associated to slithIR variables diff --git a/slither/slithir/variables/reference.py b/slither/slithir/variables/reference.py index <HASH>..<HASH> 100644 --- a/slither/slithir/variables/reference.py +++ b/slither/slithir/variables/reference.py @@ -1,6 +1,8 @@ -from slither.core.variables.variable import Variable from slither.core.children.child_node import ChildNode +from slither.core.declarations import Contract, Enum, SolidityVariableComposed +from slither.core.variables.variable import Variable + class ReferenceVariable(ChildNode, Variable): @@ -10,6 +12,7 @@ class ReferenceVariable(ChildNode, Variable): super(ReferenceVariable, self).__init__() self._index = ReferenceVariable.COUNTER ReferenceVariable.COUNTER += 1 + self._points_to = None @property def index(self): @@ -20,6 +23,25 @@ class ReferenceVariable(ChildNode, Variable): self._index = idx @property + def points_to(self): + """ + Return the variable pointer by the reference + It is the left member of a Index or Member operator + """ + return self._points_to + + @points_to.setter + def points_to(self, points_to): + # Can only be a rvalue of + # Member or Index operator + from slither.slithir.utils.utils import is_valid_lvalue + assert is_valid_lvalue(points_to) \ + or points_to == SolidityVariableComposed('msg.data') \ + or isinstance(points_to, (Contract, Enum)) + + self._points_to = points_to + + 
@property def name(self): return 'REF_{}'.format(self.index)
SlithIR: add points_to property to ReferenceVariable
crytic_slither
train
90538306e3aab016e2791aea6e6c4a0be4431e6f
diff --git a/lib/ostatus/atom.js b/lib/ostatus/atom.js index <HASH>..<HASH> 100644 --- a/lib/ostatus/atom.js +++ b/lib/ostatus/atom.js @@ -39,6 +39,7 @@ function render(updates, profile, callback) { var context = profile; context.updates = updates; context.host = host; + if (updates && updates.length > 0) context.updated = updates[0].updated; Mu.render(parsed,context) .on('data', function (c) { buffer += c.toString(); }) .on('end', function () {callback(null, buffer);}) diff --git a/lib/ostatus/http.js b/lib/ostatus/http.js index <HASH>..<HASH> 100644 --- a/lib/ostatus/http.js +++ b/lib/ostatus/http.js @@ -68,7 +68,7 @@ function post(url, reqBody, headers, callback) { var port = secure ? 443 : 80; headers["Host"] = host; - headers["Content-Length"] = reqBody.length; + headers["Content-Length"] = reqBody.length + 1; if (url.search != undefined) path += url.search; diff --git a/lib/ostatus/push.js b/lib/ostatus/push.js index <HASH>..<HASH> 100644 --- a/lib/ostatus/push.js +++ b/lib/ostatus/push.js @@ -81,7 +81,7 @@ function sign(data, secret) { function distribute(data, url, secret, callback) { var headers = {"Content-Type": "application/atom-xml"}; - + console.log("Data: ===" + data + "==="); if (secret != undefined) { var digest = "sha1="+ sign(data, secret); headers["X-Hub-Signature"] = digest; @@ -91,7 +91,7 @@ function distribute(data, url, secret, callback) { Http.post(url, data, headers, function(err, response, body) { if (err) return callback(err); if (response.statusCode >= 200 && response.statusCode < 300) { - callback(null, body); + callback(null, response.statusCode, body); } else { callback(new Error("Push distribute returned HTTP Status " + response.statusCode)); } diff --git a/lib/ostatus/templates/updates.xml.mu b/lib/ostatus/templates/updates.xml.mu index <HASH>..<HASH> 100644 --- a/lib/ostatus/templates/updates.xml.mu +++ b/lib/ostatus/templates/updates.xml.mu @@ -2,7 +2,7 @@ <feed xml:lang="en-US" xmlns="http://www.w3.org/2005/Atom" 
xmlns:thr="http://purl.org/syndication/thread/1.0" xmlns:georss="http://www.georss.org/georss" xmlns:activity="http://activitystrea.ms/spec/1.0/" xmlns:media="http://purl.org/syndication/atommedia" xmlns:poco="http://portablecontacts.net/spec/1.0" xmlns:ostatus="http://ostatus.org/schema/1.0" xmlns:statusnet="http://status.net/schema/api/1/"> <id>http://{{host}}/updates/{{username}}.atom</id> <title>Latest updates from {{fullname}}</title> - <updated>2011-01-10T21:29:19+00:00</updated> + <updated>{{updated}}</updated> <link rel="alternate" href="http://{{host}}/users/{{username}}" type="text/html"/> <link rel="hub" href="http://{{host}}/push/hub" /> <link rel="salmon" href="http://{{host}}/salmon/user/{{username}}" />
Various fixes, too lazy to split my commit
eschnou_node-ostatus
train
0bcb6e0d8e5f618f6a3b7afb2794f83b4faa84ba
diff --git a/modeshape-jcr/src/test/java/org/modeshape/jcr/ConnectorChangesTest.java b/modeshape-jcr/src/test/java/org/modeshape/jcr/ConnectorChangesTest.java index <HASH>..<HASH> 100644 --- a/modeshape-jcr/src/test/java/org/modeshape/jcr/ConnectorChangesTest.java +++ b/modeshape-jcr/src/test/java/org/modeshape/jcr/ConnectorChangesTest.java @@ -56,12 +56,12 @@ public class ConnectorChangesTest extends SingleUseAbstractTest { @Test public void testChangesEmittedWhenNodeCreated() throws Exception { logger.debug("Executing testChangesEmittedWhenNodeCreated()..."); - + /* hmm I guess the deletion is there to remove previously generated state, so I'll leave it in */ FileUtil.delete("target/federation_persistent_repository"); /* but for the tests to run the following directory must exists it seems */ new File("target/federation_persistent_repository/store/persistentRepository").mkdirs(); - + final Session session = session(); final Node root = session.getRootNode(); logger.debug("Root node is: "); @@ -69,15 +69,17 @@ public class ConnectorChangesTest extends SingleUseAbstractTest { final Node federation = session.getNode("/federation"); federation.addNode("testNode"); session.save(); - + /* since the event need some time to propagate to the listener, we'll retry three times */ int tries = 0; - while (tries++ < 3){ - long wait = 100l + (tries * tries * 1000l); //1st: 100ms, 2nd: 1100ms, 3rd: 4100ms - Thread.sleep(wait); - if (listener.receivedChangeSet.size() > 0){ + while (tries++ < 3) { + long wait = 100l + (tries * tries * 1000l); // 1st: 100ms, 2nd: 1100ms, 3rd: 4100ms + synchronized (this) { + Thread.sleep(wait); + } + if (listener.receivedChangeSet.size() > 0) { break; - }else{ + } else { logger.debug("No event after " + wait + "ms received."); } }
MODE-<I> Added synchronized block around Thread.sleep
ModeShape_modeshape
train
21bd040473b8b05908cec822f5aae0d256539847
diff --git a/src/main/java/org/paumard/spliterators/RepeatingSpliterator.java b/src/main/java/org/paumard/spliterators/RepeatingSpliterator.java index <HASH>..<HASH> 100644 --- a/src/main/java/org/paumard/spliterators/RepeatingSpliterator.java +++ b/src/main/java/org/paumard/spliterators/RepeatingSpliterator.java @@ -73,6 +73,6 @@ public class RepeatingSpliterator<E> implements Spliterator<E> { @Override public int characteristics() { - return this.spliterator.characteristics() | Spliterator.ORDERED; + return this.spliterator.characteristics() & ~Spliterator.SORTED | Spliterator.ORDERED; } } diff --git a/src/test/java/org/paumard/spliterators/RepeatingSpliteratorTest.java b/src/test/java/org/paumard/spliterators/RepeatingSpliteratorTest.java index <HASH>..<HASH> 100644 --- a/src/test/java/org/paumard/spliterators/RepeatingSpliteratorTest.java +++ b/src/test/java/org/paumard/spliterators/RepeatingSpliteratorTest.java @@ -19,7 +19,8 @@ package org.paumard.spliterators; import org.paumard.streams.StreamsUtils; import org.testng.annotations.Test; -import java.util.List; +import java.util.*; +import java.util.function.Predicate; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -57,6 +58,19 @@ public class RepeatingSpliteratorTest { assertThat(result).containsExactly("a", "a", "b", "b", "c", "c"); } + @Test + public void should_repeat_a_sorted_stream_correctly_and_in_an_unsorted_stream() { + // Given + SortedSet<String> sortedSet = new TreeSet<>(Arrays.asList("a", "b", "c")); + int repeating = 2; + + // When + Stream<String> stream = StreamsUtils.repeat(sortedSet.stream(), repeating); + + // Then + assertThat(stream.spliterator().characteristics() & Spliterator.SORTED).isEqualTo(0); + } + @Test(expectedExceptions = NullPointerException.class) public void should_not_build_a_repeating_spliterator_on_a_null_spliterator() {
Fixed the repeating operator in the case of a SORTED stream
JosePaumard_streams-utils
train
2e179ad71d1b9a2ef70e0d674d24b19859a99054
diff --git a/lib/strainer/runner.rb b/lib/strainer/runner.rb index <HASH>..<HASH> 100644 --- a/lib/strainer/runner.rb +++ b/lib/strainer/runner.rb @@ -10,6 +10,8 @@ module Strainer extend Equivalence equivalence :@cookbook_names, :@options + attr_reader :cookbooks + # Creates a Strainer runner # # @param [Array<String>] cookbook_names @@ -22,15 +24,19 @@ module Strainer @cookbook_names = cookbook_names @options = options @sandbox = Strainer::Sandbox.new(cookbook_names, options) - @cookbooks = @sandbox.cookbooks @report = {} + @cookbooks = {} + + load_strainerfiles end # Runs the Strainer runner def run - @cookbooks.each do |cookbook| + @cookbooks.each do |name, c| + cookbook = c[:cookbook] + strainerfile = c[:strainerfile] + Strainer.ui.debug "Starting Runner for #{cookbook.cookbook_name} (#{cookbook.version})" - strainerfile = Strainer::Strainerfile.for(cookbook, @options) Strainer.ui.header("# Straining '#{cookbook.cookbook_name} (v#{cookbook.version})'") strainerfile.commands.each do |command| @@ -50,5 +56,15 @@ module Strainer abort unless @report.values.collect(&:values).flatten.all? end + + private + def load_strainerfiles + @sandbox.cookbooks.each do |cookbook| + @cookbooks[cookbook.name] = { + cookbook: cookbook, + strainerfile: Strainer::Strainerfile.for(cookbook, @options) + } + end + end end end diff --git a/lib/strainer/strainerfile.rb b/lib/strainer/strainerfile.rb index <HASH>..<HASH> 100644 --- a/lib/strainer/strainerfile.rb +++ b/lib/strainer/strainerfile.rb @@ -52,6 +52,13 @@ module Strainer end end + # Reloads the Strainerfile from disk + def reload! + @all_commands = nil + @commands = nil + load! + end + private # Parse the given Strainerfile def load!
Load the Strainerfile for each cookbook on initialize instead of during the run
customink_strainer
train
cceb41e45ef92d7e65b5df19e3679282391e9aae
diff --git a/test/unit/autoNumeric.spec.js b/test/unit/autoNumeric.spec.js index <HASH>..<HASH> 100644 --- a/test/unit/autoNumeric.spec.js +++ b/test/unit/autoNumeric.spec.js @@ -36,11 +36,11 @@ import $ from '../../node_modules/jquery/dist/jquery'; import an from '../../src/autoNumeric'; // Default Jasmine test to make sure the test framework works -xdescribe('A test suite', () => { - it('contains a spec with an expectation', () => { - expect(true).toBe(true); - }); -}); +// describe('A test suite', () => { +// it('contains a spec with an expectation', () => { +// expect(true).toBe(true); +// }); +// }); // The autoNumeric tests :
Comment out the default Jasmine test in order to see a <I>% success without any skipped tests.
autoNumeric_autoNumeric
train
6bbee97832df068bbf642354c82fdd1dd505107b
diff --git a/web/concrete/blocks/image/controller.php b/web/concrete/blocks/image/controller.php index <HASH>..<HASH> 100644 --- a/web/concrete/blocks/image/controller.php +++ b/web/concrete/blocks/image/controller.php @@ -104,7 +104,7 @@ class Controller extends BlockController { function getLinkURL() { if (!empty($this->externalLink)) { $sec = Core::make('helper/security'); - return htmlentities($sec->sanitizeURL($this->externalLink)); + return $sec->sanitizeURL($this->externalLink); } else if (!empty($this->internalLinkCID)) { $linkToC = Page::getByID($this->internalLinkCID); return (empty($linkToC) || $linkToC->error) ? '' : Loader::helper('navigation')->getLinkToCollection($linkToC);
removing htmlentities Former-commit-id: 3db<I>c<I>e<I>f8e1a5b<I>be3d9d3cdac<I>c
concrete5_concrete5
train
adb18663b63fb201c89ce62c88429b6eb9dff0c1
diff --git a/lib/mongoid/errors/invalid_options.rb b/lib/mongoid/errors/invalid_options.rb index <HASH>..<HASH> 100644 --- a/lib/mongoid/errors/invalid_options.rb +++ b/lib/mongoid/errors/invalid_options.rb @@ -19,7 +19,7 @@ module Mongoid #:nodoc super( translate( "invalid_options", - { :name => name, :invalid => invalid, :valid => valid } + { :name => name, :invalid => invalid, :valid => valid.join(', ') } ) ) end
made InvalidOptions exception readable
mongodb_mongoid
train
30bfac6bfe522cba42d2e8feb4e829ea0cbe2e39
diff --git a/lib/rack/utf8_sanitizer.rb b/lib/rack/utf8_sanitizer.rb index <HASH>..<HASH> 100644 --- a/lib/rack/utf8_sanitizer.rb +++ b/lib/rack/utf8_sanitizer.rb @@ -1,9 +1,12 @@ # encoding: ascii-8bit require 'uri' +require 'stringio' module Rack class UTF8Sanitizer + StringIO = ::StringIO + def initialize(app) @app = app end @@ -20,7 +23,20 @@ module Rack HTTP_REFERER ) + SANITIZABLE_CONTENT_TYPES = %w( + text/plain + application/x-www-form-urlencoded + ) + + # MRI-optimization + POST = 'POST' + PUT = 'PUT' + def sanitize(env) + request_method = env['REQUEST_METHOD'] + if request_method == POST || request_method == PUT + sanitize_rack_input(env) + end env.each do |key, value| if URI_FIELDS.include?(key) env[key] = transfer_frozen(value, @@ -36,6 +52,47 @@ module Rack protected + def sanitize_rack_input(env) + # https://github.com/rack/rack/blob/master/lib/rack/request.rb#L42 + # Logic borrowed from Rack::Request#media_type,#media_type_params,#content_charset + # Ignoring charset in content type. + content_type = env['CONTENT_TYPE'].to_s.split(/\s*[;,]\s*/, 2).first.downcase + return unless SANITIZABLE_CONTENT_TYPES.any? {|type| content_type == type } + env['rack.input'] &&= sanitize_io(env['rack.input']) + end + + # Modeled after Rack::RewindableInput + # TODO: Should this delegate any methods to the original io? + class SanitizedRackInput + def initialize(original_io, sanitized_io) + @original_io = original_io + @sanitized_io = sanitized_io + end + def gets + @sanitized_io.gets + end + def read(*args) + @sanitized_io.read(*args) + end + def each(&block) + @sanitized_io.each(&block) + end + def rewind + @sanitized_io.rewind + end + def close + @sanitized_io.close + end + end + + def sanitize_io(io) + input = io.read + io.close + sanitized_io = transfer_frozen(input, + sanitize_string(input)) + SanitizedRackInput.new(io, StringIO.new(sanitized_io)) + end + # URI.encode/decode expect the input to be in ASCII-8BIT. 
# However, there could be invalid UTF-8 characters both in # raw and percent-encoded form. diff --git a/test/test_utf8_sanitizer.rb b/test/test_utf8_sanitizer.rb index <HASH>..<HASH> 100644 --- a/test/test_utf8_sanitizer.rb +++ b/test/test_utf8_sanitizer.rb @@ -151,4 +151,66 @@ describe Rack::UTF8Sanitizer do env["REQUEST_PATH"].should.be.frozen end end + + describe "with form data" do + def sanitize_form_data + @plain_input = "foo bar лол".force_encoding('UTF-8') + @uri_input = "http://bar/foo+%2F%3A+bar+%D0%BB%D0%BE%D0%BB".force_encoding('UTF-8') + env = @app.({ + "REQUEST_METHOD" => "POST", + "CONTENT_TYPE" => "application/x-www-form-urlencoded;foo=bar", + "HTTP_USER_AGENT" => @plain_input, + "rack.input" => @rack_input, + }) + sanitized_input = env['rack.input'].read + sanitized_input.encoding.should == Encoding::UTF_8 + sanitized_input.should.be.valid_encoding + yield sanitized_input if block_given? + env['rack.input'].rewind + behaves_like :does_sanitize_plain + behaves_like :does_sanitize_uri + behaves_like :identity_plain + behaves_like :identity_uri + env['rack.input'].close + end + + it "sanitizes StringIO rack.input" do + input = "foo=bla&quux=bar" + @rack_input = StringIO.new input + + sanitize_form_data do |sanitized_input| + sanitized_input.should == input + end + end + + it "sanitizes StringIO rack.input with bad encoding" do + input = "foo=bla&quux=bar\xED" + @rack_input = StringIO.new input + + sanitize_form_data do |sanitized_input| + sanitized_input.should != input + end + end + + it "sanitizes non-StringIO rack.input" do + require 'rack/rewindable_input' + input = "foo=bla&quux=bar" + @rack_input = Rack::RewindableInput.new(StringIO.new(input)) + + sanitize_form_data do |sanitized_input| + sanitized_input.should == input + end + end + + it "sanitizes non-StringIO rack.input with bad encoding" do + require 'rack/rewindable_input' + input = "foo=bla&quux=bar\xED" + @rack_input = Rack::RewindableInput.new(StringIO.new(input)) + + sanitize_form_data 
do |sanitized_input| + sanitized_input.should != input + end + end + + end end
Sanitize form posts; wrap rack.input in SanitizedRackInput
whitequark_rack-utf8_sanitizer
train
c6ad0b1bfdcdfe1615afef0733b3c34f1ebdc32d
diff --git a/src/Valkyrja/View/Engines/PHPEngine.php b/src/Valkyrja/View/Engines/PHPEngine.php index <HASH>..<HASH> 100644 --- a/src/Valkyrja/View/Engines/PHPEngine.php +++ b/src/Valkyrja/View/Engines/PHPEngine.php @@ -13,6 +13,7 @@ declare(strict_types=1); namespace Valkyrja\View\Engines; +use RuntimeException; use Valkyrja\Support\Directory; use Valkyrja\View\Engine; use Valkyrja\View\Exceptions\InvalidConfigPath; @@ -25,7 +26,7 @@ use function ob_start; use function strpos; use function trim; -use const EXTR_OVERWRITE; +use const EXTR_SKIP; /** * Class PHPEngine. @@ -117,16 +118,34 @@ class PHPEngine implements Engine */ protected function renderFullPath(string $path, array $variables = []): string { - extract($variables, EXTR_OVERWRITE); - $this->startRender(); - - include $path; + $this->requirePath($path, $variables); return $this->endRender(); } /** + * Require a path to generate its contents with provided variables. + * + * @param string $path The file path + * @param array $variables [optional] The variables + * + * @return void + */ + protected function requirePath(string $path, array $variables = []): void + { + if (is_file($path)) { + extract($variables, EXTR_SKIP); + + require $path; + + return; + } + + throw new RuntimeException("Path does not exist at {$path}"); + } + + /** * Get the full path for a given template name. 
* * @param string $template The template diff --git a/src/Valkyrja/View/Templates/Template.php b/src/Valkyrja/View/Templates/Template.php index <HASH>..<HASH> 100644 --- a/src/Valkyrja/View/Templates/Template.php +++ b/src/Valkyrja/View/Templates/Template.php @@ -199,7 +199,7 @@ class Template implements Contract */ public function escape($value): string { - $value = mb_convert_encoding((string) $value, 'UTF-8', 'UTF-8'); + $value = mb_convert_encoding((string)$value, 'UTF-8', 'UTF-8'); $value = htmlentities($value, ENT_QUOTES, 'UTF-8'); return $value; @@ -349,10 +349,10 @@ class Template implements Contract protected function renderFile(string $name, array $variables = [], bool $renderLayout = false): string { // Set the variables with the new variables and this view instance - $this->variables = array_merge($this->variables, $variables, ['template' => $this]); + $variables = array_merge($this->variables, $variables, ['template' => $this]); // Render the template - $template = $this->renderTemplate($name); + $template = $this->renderTemplate($name, $variables); // Check if a layout has been set if (null === $this->layout || ! $renderLayout) { @@ -362,27 +362,28 @@ class Template implements Contract // Begin tracking layout changes for recursive layout $this->trackLayoutChanges = true; - return $this->renderLayout($this->layout); + return $this->renderLayout($this->layout, $variables); } /** * Render a layout. 
* - * @param string $layout The layout + * @param string $layout The layout + * @param array $variables [optional] The variables to set * * @return string */ - protected function renderLayout(string $layout): string + protected function renderLayout(string $layout, array $variables = []): string { // Render the layout - $renderedLayout = $this->renderTemplate($layout); + $renderedLayout = $this->renderTemplate($layout, $variables); // Check if the layout has changed if ($this->trackLayoutChanges && $this->hasLayoutChanged && null !== $this->layout) { // Reset the flag $this->hasLayoutChanged = false; // Render the new layout - $renderedLayout = $this->renderLayout($this->layout); + $renderedLayout = $this->renderLayout($this->layout, $variables); } return $renderedLayout;
View: Fix bugs with partials and templates rendering.
valkyrjaio_valkyrja
train
720e45c047dbcf169f2b29696b65a7d4c557f8f3
diff --git a/test/test.js b/test/test.js index <HASH>..<HASH> 100644 --- a/test/test.js +++ b/test/test.js @@ -81,7 +81,6 @@ describe("phantom html to pdf", function () { fs.unlinkSync(filePath); } catch (e) { } - ; } } };
Adding empty endline to test file
pofider_phantom-html-to-pdf
train
bcf1e814624732bce4c876467cc0275891f26bc8
diff --git a/configs/webpack.config.js b/configs/webpack.config.js index <HASH>..<HASH> 100644 --- a/configs/webpack.config.js +++ b/configs/webpack.config.js @@ -40,16 +40,20 @@ const preprocessor = production => ({ }); const ifdef = config => (context, { addLoader }) => - addLoader({ - test: /\.tsx?/, - use: [ + addLoader( + Object.assign( { - loader: 'awesome-typescript-loader' + test: /\.tsx?/, + use: [ + { + loader: 'awesome-typescript-loader' + }, + { loader: 'ifdef-loader?' + JSON.stringify(config) } + ] }, - { loader: 'ifdef-loader?' + JSON.stringify(config) } - ], - ...context.match - }); + context.match + ) + ); const makeTs = prod => match(
Fix webpack config for Node 6
cyclejs-community_one-fits-all
train
d66c22a1f1a7597d57cd85b0b511670189cca5b8
diff --git a/machina/apps/conversation/abstract_models.py b/machina/apps/conversation/abstract_models.py index <HASH>..<HASH> 100644 --- a/machina/apps/conversation/abstract_models.py +++ b/machina/apps/conversation/abstract_models.py @@ -4,6 +4,7 @@ from __future__ import unicode_literals # Third party imports +from django.core.exceptions import ValidationError from django.db import models from django.utils.encoding import python_2_unicode_compatible from django.utils.translation import ugettext_lazy as _ @@ -90,6 +91,11 @@ class AbstractTopic(DatedModel): self._last_post = posts[0] if posts.exists() else None return self._last_post + def clean(self): + super(AbstractTopic, self).clean() + if self.forum.is_category or self.forum.is_link: + raise ValidationError(_('A topic can not be associated with a category or a link forum')) + def delete(self, using=None): super(AbstractTopic, self).delete(using) self.forum.update_trackers() diff --git a/machina/apps/forum/abstract_models.py b/machina/apps/forum/abstract_models.py index <HASH>..<HASH> 100644 --- a/machina/apps/forum/abstract_models.py +++ b/machina/apps/forum/abstract_models.py @@ -82,6 +82,18 @@ class AbstractForum(MPTTModel, ActiveModel): """ return self.level * 2 + @property + def is_category(self): + return self.type == FORUM_TYPES.forum_cat + + @property + def is_forum(self): + return self.type == FORUM_TYPES.forum_post + + @property + def is_link(self): + return self.type == FORUM_TYPES.forum_link + def clean(self): if self.parent: if self.parent.type == FORUM_TYPES.forum_link: @@ -89,10 +101,52 @@ class AbstractForum(MPTTModel, ActiveModel): super(AbstractForum, self).clean() + def save(self, *args, **kwargs): + # It is vital to track the changes of the parent associated with a forum in order to + # maintain counters up-to-date and to trigger other operations such as permissions updates. 
+ old_instance = None + if self.pk: + old_instance = self.__class__._default_manager.get(pk=self.pk) + + # Do the save + super(AbstractForum, self).save(*args, **kwargs) + + # If any change has been made to the forum parent, trigger the update of the counters + if old_instance and old_instance.parent != self.parent: + self.update_trackers() + # TODO: trigger a 'post_forum_parent_update' signal + + def _simple_save(self, *args, **kwargs): + """ + Calls the parent save method in order to avoid the checks for forum parent changes + which can result in triggering a new update of the counters associated with the + current forum. + This allow the database to not be hit by such checks during very common and regular + operations such as those provided by the update_tracker function; indeed these operations + will never result in an update of a forum parent. + """ + super(AbstractForum, self).save(*args, **kwargs) + def update_trackers(self): - self.real_topics_count = self.topics.count() - self.topics_count = self.topics.filter(approved=True).count() + # Fetch the list of ids of all descendant forums including the current one + forum_ids = self.get_descendants(include_self=True).values_list('id', flat=True) + + # Determine the list of the associated topics, that is the list of topics + # associated with the current forum plus the list of all topics associated + # with the descendant forums. + Topic = models.get_model('conversation', 'Topic') + topics = Topic.objects.filter(forum__id__in=forum_ids) + + self.real_topics_count = topics.count() + self.topics_count = topics.filter(approved=True).count() # Compute the forum level posts count - posts_count = sum(topic.posts_count for topic in self.topics.all()) + posts_count = sum(topic.posts_count for topic in topics) self.posts_count = posts_count - self.save() + + # Any save of a forum triggered from the update_tracker process will not result + # in checking for a change of the forum's parent. 
+ self._simple_save() + + # Trigger the parent trackers update if necessary + if self.parent: + self.parent.update_trackers()
Clean and save processes updated for forum and topic
ellmetha_django-machina
train
5739ba4d16cc05df33dbeccdd777e6b30d7eb8b1
diff --git a/suite/suite.go b/suite/suite.go index <HASH>..<HASH> 100644 --- a/suite/suite.go +++ b/suite/suite.go @@ -1,9 +1,9 @@ package suite import ( - "testing" "reflect" "regexp" + "testing" ) // Suite is a basic testing suite with methods for storing and @@ -32,19 +32,33 @@ func Run(t *testing.T, suite TestingSuite) { } methodFinder := reflect.TypeOf(suite) + tests := []testing.InternalTest{} for index := 0; index < methodFinder.NumMethod(); index++ { method := methodFinder.Method(index) if ok, _ := regexp.MatchString("^Test", method.Name); ok { - if setupTestSuite, ok := suite.(SetupTestSuite); ok { - setupTestSuite.SetupTest() - } - method.Func.Call([]reflect.Value{reflect.ValueOf(suite)}) - if tearDownTestSuite, ok := suite.(TearDownTestSuite); ok { - tearDownTestSuite.TearDownTest() + test := testing.InternalTest{ + Name: method.Name, + F: func(t *testing.T) { + suite.SetT(t) + if setupTestSuite, ok := suite.(SetupTestSuite); ok { + setupTestSuite.SetupTest() + } + method.Func.Call([]reflect.Value{reflect.ValueOf(suite)}) + if tearDownTestSuite, ok := suite.(TearDownTestSuite); ok { + tearDownTestSuite.TearDownTest() + } + }, } + tests = append(tests, test) } } + if !testing.RunTests(func(pat, str string) (bool, error) { + return true, nil + }, tests) { + t.Fail() + } + if tearDownAllSuite, ok := suite.(TearDownAllSuite); ok { tearDownAllSuite.TearDownSuite() }
Have testing.RunTests run the tests we find with their setup methods.
stretchr_testify
train