| hash | diff | message | project | split |
|---|---|---|---|---|
333848b6e49f7d72970a1f2ccd1972cbfa65e714
|
diff --git a/venues.go b/venues.go
index <HASH>..<HASH> 100644
--- a/venues.go
+++ b/venues.go
@@ -213,6 +213,7 @@ type User struct {
Contact Contact `json:"contact"`
}
+// Lists are Lists on User.
type Lists struct {
Groups []Group `json:"groups"`
}
@@ -260,11 +261,13 @@ type PhotoSource struct {
URL string `json:"url"`
}
+// Reasons why the venue is shown.
type Reasons struct {
Count int `json:"count"`
Items []Reason `json:"items"`
}
+// Reason is the items in Reasons.
type Reason struct {
Summary string `json:"summary"`
Type string `json:"type"`
@@ -274,11 +277,13 @@ type Reason struct {
Count int `json:"count"`
}
+// ReasonTarget is where the reason would be shown for a Reason.
type ReasonTarget struct {
Type string `json:"type"`
Object ReasonObject `json:"object"`
}
+// ReasonObject is Object in ReasonTarget.
type ReasonObject struct {
ID string `json:"id"`
Type string `json:"type"`
@@ -286,6 +291,7 @@ type ReasonObject struct {
Ignoreable bool `json:"ignoreable"`
}
+// ReasonObjectTarget is the type of target and the URL for a ReasonObject.
type ReasonObjectTarget struct {
Type string `json:"type"`
URL string `json:"url"`
@@ -304,6 +310,7 @@ type HereNow struct {
Groups []HereNowGroup `json:"Groups"`
}
+// HereNowGroup is the groups item in HereNow.
type HereNowGroup struct {
Group
Items Omitted `json:"items"`
@@ -437,11 +444,13 @@ type Open struct {
RenderedTime string `json:"renderedTime"`
}
+// PageUpdates is on a Venue.
type PageUpdates struct {
Count int `json:"count"`
Items Omitted `json:"items"`
}
+// Inbox is on a Venue.
type Inbox struct {
Count int `json:"count"`
Items Omitted `json:"items"`
diff --git a/venues_aspects.go b/venues_aspects.go
index <HASH>..<HASH> 100644
--- a/venues_aspects.go
+++ b/venues_aspects.go
@@ -44,12 +44,14 @@ type venueEventResp struct {
Events Events `json:"events"`
}
+// Events is part of the response for VenueService.Events.
type Events struct {
Count int `json:"count"`
Summary string `json:"summary"`
Items []Event `json:"items"`
}
+// Event is the Items in Events.
type Event struct {
ID string `json:"id"`
Name string `json:"name"`
@@ -122,7 +124,7 @@ type venueLikesResp struct {
Likes LikesResp `json:"likes"`
}
-// Likesresp is the response for the venue likes endpoint
+// LikesResp is the response for the venue likes endpoint
type LikesResp struct {
Count int `json:"count"`
Summary string `json:"summary"`
@@ -148,6 +150,7 @@ type venueLinkResp struct {
Links Links `json:"links"`
}
+// Links is the response for VenueService.Links
type Links struct {
Count int `json:"count"`
Items []Link `json:"items"`
@@ -161,6 +164,7 @@ type Link struct {
URL string `json:"url"`
}
+// Provider is Provider in a Link.
type Provider struct {
ID string `json:"id"`
}
@@ -240,23 +244,27 @@ type venueMenuResp struct {
Menu MenuResp `json:"menu"`
}
+// MenuResp is the response for VenueService.Menu.
type MenuResp struct {
Provider MenuProvider `json:"provider"`
Menus Menus `json:"menus"`
}
+// MenuProvider is the Provider for the MenuResp.
type MenuProvider struct {
Name string `json:"name"`
AttributionImage string `json:"attributionImage"`
- AttributionLink string `json;"attributionLink"`
+ AttributionLink string `json:"attributionLink"`
AttributionText string `json:"attributionText"`
}
+// Menus is part of the MenuResp.
type Menus struct {
Count int `json:"count"`
Items []FullMenu `json:"items"`
}
+// FullMenu is an item in Menus.
type FullMenu struct {
MenuID string `json:"menuId"`
Name string `json:"name"`
@@ -264,22 +272,26 @@ type FullMenu struct {
Entries Entries `json:"entries"`
}
+// Entries is the Entries on a FullMenu.
type Entries struct {
Count int `json:"count"`
Items []Entry `json:"items"`
}
+// Entry is an item in Entries.
type Entry struct {
SectionID string `json:"sectionId"`
Name string `json:"name"`
Entries SubEntries `json:"entries"`
}
+// SubEntries are the Entries on an Entry.
type SubEntries struct {
Count int `json:"count"`
Items []SubEntry `json:"items"`
}
+// SubEntry is an item in SubEntries.
type SubEntry struct {
EntryID string `json:"entryId"`
Name string `json:"name"`
|
add comments to the rest of the structs
|
peppage_foursquarego
|
train
|
fe80f87b198352cad0baa96b921e063748e0ac9b
|
diff --git a/tests/js/harness.js b/tests/js/harness.js
index <HASH>..<HASH> 100644
--- a/tests/js/harness.js
+++ b/tests/js/harness.js
@@ -9,8 +9,6 @@ require("../../");
fluid.defaults("gpii.test.pouch.harness", {
gradeNames: ["fluid.component"],
- port: 6789,
- baseUrl: "http://localhost:6789/",
events: {
expressStarted: null,
pouchStarted: null,
diff --git a/tests/js/launch-test-harness.js b/tests/js/launch-test-harness.js
index <HASH>..<HASH> 100644
--- a/tests/js/launch-test-harness.js
+++ b/tests/js/launch-test-harness.js
@@ -7,4 +7,7 @@ require("./harness");
fluid.setLogging(true);
-gpii.test.pouch.harness();
+gpii.test.pouch.harness({
+ port: 6789,
+ baseUrl: "http://localhost:6789/"
+});
|
GPII-<I>: Moved port options to avoid having defaults in the harness grade itself.
|
GPII_gpii-pouchdb
|
train
|
1030d613e034c93fbe6057d6ca3e79c1e91a0ff0
|
diff --git a/filterpy/common/helpers.py b/filterpy/common/helpers.py
index <HASH>..<HASH> 100644
--- a/filterpy/common/helpers.py
+++ b/filterpy/common/helpers.py
@@ -117,7 +117,6 @@ class Saver(object):
if save_current:
self.save()
-
def save(self):
""" save the current state of the Kalman filter"""
@@ -167,7 +166,6 @@ class Saver(object):
""" list of all keys"""
return list(self._DL.keys())
-
def to_array(self):
"""
Convert all saved attributes from a list to np.array.
@@ -189,6 +187,28 @@ class Saver(object):
raise ValueError("could not convert {} into np.array".format(key))
+ def flatten(self):
+ """
+ Flattens any np.array of column vectors into 1D arrays. Basically,
+ this makes data readable for humans if you are just inspecting via
+ the REPL. For example, if you have saved a KalmanFilter object with 89
+ epochs, self.x will be shape (89, 9, 1) (for example). After flatten
+ is run, self.x.shape == (89, 9), which displays nicely from the REPL.
+
+ There is no way to unflatten, so it's a one way trip.
+ """
+
+ for key in self.keys:
+ try:
+ arr = self.__dict__[key]
+ shape = arr.shape
+ if shape[2] == 1:
+ self.__dict__[key] = arr.reshape(shape[0], shape[1])
+ except:
+ # not an ndarray or not a column vector
+ pass
+
+
def runge_kutta4(y, x, dx, f):
"""computes 4th order Runge-Kutta for dy/dx.
|
Added flatten() to Saver
Just a convenience function to flatten any column vectors that are
saved, mostly so they display nicely in the REPL.
|
rlabbe_filterpy
|
train
|
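A minimal numpy sketch of the reshape that flatten() performs, standalone and outside filterpy, using the shapes from the commit message:

```python
import numpy as np

# 89 saved epochs, each a 9x1 column vector, as in the commit message.
x = np.zeros((89, 9, 1))

# Drop the trailing singleton dimension so the REPL shows a plain 2D array.
if x.ndim == 3 and x.shape[2] == 1:
    x = x.reshape(x.shape[0], x.shape[1])

print(x.shape)  # (89, 9)
```

As the docstring notes, the trip is one-way: once the singleton axis is gone, nothing records which arrays used to be column vectors.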
fc63be111b11dbaa6331ce5e75937864683d8834
|
diff --git a/src/component/legend/LegendView.js b/src/component/legend/LegendView.js
index <HASH>..<HASH> 100644
--- a/src/component/legend/LegendView.js
+++ b/src/component/legend/LegendView.js
@@ -197,6 +197,7 @@ export default echarts.extendComponentView({
var itemWidth = legendModel.get('itemWidth');
var itemHeight = legendModel.get('itemHeight');
var inactiveColor = legendModel.get('inactiveColor');
+ var symbolKeepAspect = legendModel.get('symbolKeepAspect');
var isSelected = legendModel.isSelected(name);
var itemGroup = new Group();
@@ -217,7 +218,8 @@ export default echarts.extendComponentView({
itemWidth,
itemHeight,
isSelected ? color : inactiveColor,
- true
+ // symbolKeepAspect default true for legend
+ symbolKeepAspect == null ? true : symbolKeepAspect
));
// Compose symbols
@@ -232,8 +234,14 @@ export default echarts.extendComponentView({
}
// Put symbol in the center
itemGroup.add(createSymbol(
- symbolType, (itemWidth - size) / 2, (itemHeight - size) / 2, size, size,
- isSelected ? color : inactiveColor
+ symbolType,
+ (itemWidth - size) / 2,
+ (itemHeight - size) / 2,
+ size,
+ size,
+ isSelected ? color : inactiveColor,
+ // symbolKeepAspect default true for legend
+ symbolKeepAspect == null ? true : symbolKeepAspect
));
}
|
feat: support symbolKeepAspect for legend
|
apache_incubator-echarts
|
train
|
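`symbolKeepAspect == null ? true : symbolKeepAspect` is the usual way to default an option to true while still honoring an explicit false (`symbolKeepAspect || true` would silently discard false). A Python sketch of the same null-check pattern, outside echarts:

```python
def keep_aspect_option(symbol_keep_aspect=None):
    # "symbol_keep_aspect or True" would turn an explicit False into True;
    # only an is-None test preserves all three cases.
    return True if symbol_keep_aspect is None else symbol_keep_aspect

assert keep_aspect_option() is True        # unset -> legend default
assert keep_aspect_option(False) is False  # explicit opt-out survives
assert keep_aspect_option(True) is True
```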
0ac0207f45744621f0a359800e4a2606327f7ff4
|
diff --git a/lib/ohai/plugins/c.rb b/lib/ohai/plugins/c.rb
index <HASH>..<HASH> 100644
--- a/lib/ohai/plugins/c.rb
+++ b/lib/ohai/plugins/c.rb
@@ -85,7 +85,7 @@ end
status, stdout, stderr = run_command(:no_status_check => true, :command => "cc -V -flags")
if status == 0
output = stderr.split
- if stderr !~ /^cc: error/ && output.size >= 4
+ if stderr =~ /^cc: Sun C/ && output.size >= 4
c[:sunpro] = Mash.new
c[:sunpro][:version] = output[3]
c[:sunpro][:description] = stderr.chomp
diff --git a/spec/ohai/plugins/c_spec.rb b/spec/ohai/plugins/c_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/ohai/plugins/c_spec.rb
+++ b/spec/ohai/plugins/c_spec.rb
@@ -233,11 +233,18 @@ describe Ohai::System, "plugin c" do
end
- it "should not set the languages[:c][:sunpro] tree if the corresponding cc command is not found" do
+ it "should not set the languages[:c][:sunpro] tree if the corresponding cc command fails on linux" do
fedora_error_message = "cc: error trying to exec 'i686-redhat-linux-gcc--flags': execvp: No such file or directory"
@ohai.stub!(:run_command).with({:no_status_check=>true, :command=>"cc -V -flags"}).and_return([0, "", fedora_error_message])
- @ohai._requre_plugin("c")
+ @ohai._require_plugin("c")
+ @ohai[:languages][:c].should_not have_key(:sunpro) if @ohai[:languages][:c]
+ end
+
+ it "should not set the languages[:c][:sunpro] tree if the corresponding cc command fails on hpux" do
+ hpux_error_message = "cc: warning 901: unknown option: `-flags': use +help for online documentation.\ncc: HP C/aC++ B3910B A.06.25 [Nov 30 2009]"
+ @ohai.stub!(:run_command).with({:no_status_check=>true, :command=>"cc -V -flags"}).and_return([0, "", hpux_error_message])
+ @ohai._require_plugin("c")
@ohai[:languages][:c].should_not have_key(:sunpro) if @ohai[:languages][:c]
end
|
[OHAI-<I>] Don't pick up sun C compiler on HP-UX systems
|
chef_ohai
|
train
|
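The fix replaces a negative test ("stderr is not an error message") with a positive one ("stderr identifies Sun C"), which is what keeps the HP-UX banner from being misread. A standalone Python illustration; the HP-UX banner is the one quoted in the spec, while the Sun banner here is only representative:

```python
import re

hpux = ("cc: warning 901: unknown option: `-flags': use +help for online "
        "documentation.\ncc: HP C/aC++ B3910B A.06.25 [Nov 30 2009]")
sun = "cc: Sun C 5.9 SunOS_i386 Patch 124868-01 2007/07/12"

for banner in (hpux, sun):
    old_check = not banner.startswith("cc: error")           # old: "not an error"
    new_check = re.match(r"cc: Sun C", banner) is not None   # new: "is Sun C"
    print(old_check, new_check)
# hpux -> True False : the old check wrongly accepted HP-UX's compiler
# sun  -> True True  : Sun C is still detected
```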
25a5b2be3790026273ddbc2cf13934d4cf63ef51
|
diff --git a/lxd/container_lxc.go b/lxd/container_lxc.go
index <HASH>..<HASH> 100644
--- a/lxd/container_lxc.go
+++ b/lxd/container_lxc.go
@@ -4798,10 +4798,12 @@ func (c *containerLXC) tarStoreFile(linkmap map[uint64]string, offset int, tw *t
}
}
- // Handle xattrs.
- hdr.Xattrs, err = shared.GetAllXattr(path)
- if err != nil {
- return fmt.Errorf("failed to read xattr: %s", err)
+ // Handle xattrs (for real files only)
+ if link == "" {
+ hdr.Xattrs, err = shared.GetAllXattr(path)
+ if err != nil {
+ return fmt.Errorf("failed to read xattr: %s", err)
+ }
}
if err := tw.WriteHeader(hdr); err != nil {
|
Don't attempt to read xattrs from symlinks
Closes #<I>
|
lxc_lxd
|
train
|
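The guard — read extended attributes only when the entry is not a symlink — can be mirrored with Python's os module. A Linux-only sketch, not lxd code:

```python
import os

def xattrs_for_archive(path):
    """Collect xattrs for regular files; symlinks get none, since the
    archive stores the link target rather than file data."""
    if os.path.islink(path):
        return {}
    return {name: os.getxattr(path, name) for name in os.listxattr(path)}
```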
b311f888f60aed037a9d2f63dde407f5a8c21f84
|
diff --git a/config.go b/config.go
index <HASH>..<HASH> 100644
--- a/config.go
+++ b/config.go
@@ -7,6 +7,7 @@ package main
import (
"fmt"
+ "net"
"net/url"
"os"
"os/user"
@@ -50,7 +51,8 @@ var (
defaultHost = "localhost"
defaultHTTPProfPath = "/p"
defaultAPIProto = "http"
- defaultAPIListen = "127.0.0.1:7777"
+ defaultAPIPort = "7777"
+ defaultAPIListen = defaultHost + ":" + defaultAPIPort
defaultIndentJSON = " "
defaultCacheControlMaxAge = 86400
defaultInsightReqRateLimit = 20.0
@@ -231,6 +233,37 @@ func cleanAndExpandPath(path string) string {
return filepath.Join(homeDir, path)
}
+// normalizeNetworkAddress checks for a valid local network address format and
+// adds default host and port if not present. Invalidates addresses that include
+// a protocol identifier.
+func normalizeNetworkAddress(a, defaultHost, defaultPort string) (string, error) {
+ if strings.Contains(a, "://") {
+ return a, fmt.Errorf("Address %s contains a protocol identifier, which is not allowed", a)
+ }
+ if a == "" {
+ return defaultHost + ":" + defaultPort, nil
+ }
+ host, port, err := net.SplitHostPort(a)
+ if err != nil {
+ if strings.Contains(err.Error(), "missing port in address") {
+ normalized := a + ":" + defaultPort
+ host, port, err = net.SplitHostPort(normalized)
+ if err != nil {
+ return a, fmt.Errorf("Unable to address %s after port resolution: %v", normalized, err)
+ }
+ } else {
+ return a, fmt.Errorf("Unable to normalize address %s: %v", a, err)
+ }
+ }
+ if host == "" {
+ host = defaultHost
+ }
+ if port == "" {
+ port = defaultPort
+ }
+ return host + ":" + port, nil
+}
+
// validLogLevel returns whether or not logLevel is a valid debug log level.
func validLogLevel(logLevel string) bool {
_, ok := slog.LevelFromString(logLevel)
@@ -533,8 +566,9 @@ func loadConfig() (*config, error) {
// Set the host names and ports to the default if the user does not specify
// them.
- if cfg.DcrdServ == "" {
- cfg.DcrdServ = defaultHost + ":" + activeNet.JSONRPCClientPort
+ cfg.DcrdServ, err = normalizeNetworkAddress(cfg.DcrdServ, defaultHost, activeNet.JSONRPCClientPort)
+ if err != nil {
+ return loadConfigError(err)
}
// Output folder
@@ -579,6 +613,19 @@ func loadConfig() (*config, error) {
}
cfg.PoliteiaAPIURL = urlPath
+ // Check the supplied APIListen address
+ cfg.APIListen, err = normalizeNetworkAddress(cfg.APIListen, defaultHost, defaultAPIPort)
+ if err != nil {
+ return loadConfigError(err)
+ }
+
+ // Expand some additional paths.
+ cfg.DcrdCert = cleanAndExpandPath(cfg.DcrdCert)
+ cfg.DBFileName = cleanAndExpandPath(cfg.DBFileName)
+ cfg.AgendasDBFileName = cleanAndExpandPath(cfg.AgendasDBFileName)
+ cfg.ProposalsFileName = cleanAndExpandPath(cfg.ProposalsFileName)
+ cfg.RateCertificate = cleanAndExpandPath(cfg.RateCertificate)
+
return &cfg, nil
}
diff --git a/config_test.go b/config_test.go
index <HASH>..<HASH> 100644
--- a/config_test.go
+++ b/config_test.go
@@ -271,3 +271,36 @@ func TestRetrieveRootPath(t *testing.T) {
}
}
}
+
+func TestNormalizeNetworkAddress(t *testing.T) {
+ defaultPort := "1234"
+ defaultHost := "localhost"
+ type test struct {
+ input string
+ expectation string
+ shouldBeError bool
+ }
+ tests := []test{
+ {":1234", "localhost:1234", false},
+ {"some.name", "some.name:1234", false},
+ {"192.168.0.2", "192.168.0.2:1234", false},
+ {"192.168.0.2:5678", "192.168.0.2:5678", false},
+ {"http://remote.com:5678", "http://remote.com:5678", true}, // Only local addresses supported.
+ {"", "localhost:1234", false},
+ {":", "localhost:1234", false},
+ }
+ for _, test := range tests {
+ translated, err := normalizeNetworkAddress(test.input, defaultHost, defaultPort)
+ if translated != test.expectation {
+ t.Errorf("Unexpected result. input: %s, returned: %s, expected: %s", test.input, translated, test.expectation)
+ }
+ if err != nil {
+ if test.shouldBeError {
+ continue
+ }
+ t.Errorf("Unexpected error parsing %s: %v", test.input, err)
+ } else if test.shouldBeError {
+ t.Errorf("Error expected but not seen for %s", test.input)
+ }
+ }
+}
|
config: normalize addresses and expand paths (#<I>)
* config: normalize addresses and expand paths
Check for valid host and port for dcrd and API, adding default if
not present. Expand all filepaths.
* error if protocol identifier in local network addresses
|
decred_dcrdata
|
train
|
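normalizeNetworkAddress is small enough to restate in another language. A rough Python sketch of the same rules — reject protocol prefixes, then fill in whichever of host and port is missing. The function and defaults are illustrative, and unlike Go's net.SplitHostPort this naive split does not handle IPv6 literals:

```python
def normalize_network_address(addr, default_host="localhost", default_port="7777"):
    if "://" in addr:
        raise ValueError(f"address {addr} contains a protocol identifier")
    if addr == "":
        return f"{default_host}:{default_port}"
    host, colon, port = addr.rpartition(":")
    if not colon:                       # bare host, no port given
        host, port = addr, default_port
    return f"{host or default_host}:{port or default_port}"

assert normalize_network_address(":1234") == "localhost:1234"
assert normalize_network_address("192.168.0.2") == "192.168.0.2:7777"
assert normalize_network_address(":") == "localhost:7777"
```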
11dd937d1f38c5b14ca20d8bf393148dc7edfee7
|
diff --git a/lib/dragonfly/image_magick/generators/text.rb b/lib/dragonfly/image_magick/generators/text.rb
index <HASH>..<HASH> 100644
--- a/lib/dragonfly/image_magick/generators/text.rb
+++ b/lib/dragonfly/image_magick/generators/text.rb
@@ -69,9 +69,9 @@ module Dragonfly
tempfile = convert(args.join(' '), format)
if (padding_top || padding_right || padding_bottom || padding_left)
- attrs = command_line.identify(tempfile)
- text_width = attrs[:width].to_i
- text_height = attrs[:height].to_i
+ dimensions = command_line.identify(tempfile, "-ping -format '%w %h'").split
+ text_width = dimensions[0].to_i
+ text_height = dimensions[1].to_i
width = padding_left + text_width + padding_right
height = padding_top + text_height + padding_bottom
|
proper use of command line in text generator
|
markevans_dragonfly
|
train
|
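`identify -ping -format '%w %h'` asks ImageMagick for just the two numbers, and `-ping` avoids decoding the whole image. A standalone sketch of the same call via subprocess (assumes the identify binary is on PATH; this is not Dragonfly's command_line API):

```python
import subprocess

def image_dimensions(path):
    out = subprocess.check_output(
        ["identify", "-ping", "-format", "%w %h", path], text=True
    )
    width, height = (int(n) for n in out.split())
    return width, height
```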
6cd5309544f1104b6a3db4b1d1568cd5d89fea1c
|
diff --git a/packages/@uppy/utils/src/canvasToBlob.js b/packages/@uppy/utils/src/canvasToBlob.js
index <HASH>..<HASH> 100644
--- a/packages/@uppy/utils/src/canvasToBlob.js
+++ b/packages/@uppy/utils/src/canvasToBlob.js
@@ -1,5 +1,3 @@
-const dataURItoBlob = require('./dataURItoBlob')
-
/**
* Save a <canvas> element's content to a Blob object.
*
@@ -7,12 +5,7 @@ const dataURItoBlob = require('./dataURItoBlob')
* @returns {Promise}
*/
module.exports = function canvasToBlob (canvas, type, quality) {
- if (canvas.toBlob) {
- return new Promise((resolve) => {
- canvas.toBlob(resolve, type, quality)
- })
- }
- return Promise.resolve().then(() => {
- return dataURItoBlob(canvas.toDataURL(type, quality), {})
+ return new Promise((resolve) => {
+ canvas.toBlob(resolve, type, quality)
})
}
|
@uppy/utils: simplify `canvasToBlob` (#<I>)
The built-in `HTMLCanvasElement#toBlob` is supported everywhere.
|
transloadit_uppy
|
train
|
01ca3ee7aeb9f21f292060b16a54cb527bf0cee8
|
diff --git a/lib/json/api/resource.rb b/lib/json/api/resource.rb
index <HASH>..<HASH> 100644
--- a/lib/json/api/resource.rb
+++ b/lib/json/api/resource.rb
@@ -70,7 +70,7 @@ module JSON
end
def filters(*attrs)
- @_allowed_filters.merge(*attrs)
+ @_allowed_filters.merge(attrs)
end
def filter(attr)
diff --git a/test/fixtures/active_record.rb b/test/fixtures/active_record.rb
index <HASH>..<HASH> 100644
--- a/test/fixtures/active_record.rb
+++ b/test/fixtures/active_record.rb
@@ -186,7 +186,7 @@ class PostResource < JSON::API::Resource
super(keys - [:subject])
end
- filters [:title, :author]
+ filters :title, :author
filter :id
end
|
Fixed issue with filters method expecting an array of filters
|
cerebris_jsonapi-resources
|
train
|
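The bug was a splat mix-up: `attrs` already arrives as an array, so `merge(*attrs)` re-splats it into separate arguments instead of passing one enumerable. Python's `*args` has the same trap, sketched here with set.update:

```python
allowed = set()

def filters(*attrs):
    # attrs is already a tuple of the arguments; update(*attrs) would
    # unpack it again and treat each string as an iterable of characters.
    allowed.update(attrs)

filters("title", "author")
print(allowed)  # {'title', 'author'}
```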
b9038a8ffeae80bf43395c08f2cb1b6b04c486c5
|
diff --git a/src/example-types/esTwoLevelAggregation.js b/src/example-types/esTwoLevelAggregation.js
index <HASH>..<HASH> 100644
--- a/src/example-types/esTwoLevelAggregation.js
+++ b/src/example-types/esTwoLevelAggregation.js
@@ -38,19 +38,11 @@ module.exports = {
),
aggs: {
...(validMetrics
- ? _.reduce(
- (obj, metric) =>
- _.extend(
- {
- [`twoLevelAgg_${metric}`]: {
- [metric]: { field: context.value_field },
- },
- },
- obj
- ),
- {},
- context.include
- )
+ ? F.arrayToObject(
+ metric => `twoLevelAgg_${metric}`,
metric => ({ [metric]: { field: context.value_field } }),
+ context.include
+ )
: {
twoLevelAgg: {
[context.value_type]: _.omitBy(
diff --git a/src/example-types/terms_stats.js b/src/example-types/terms_stats.js
index <HASH>..<HASH> 100644
--- a/src/example-types/terms_stats.js
+++ b/src/example-types/terms_stats.js
@@ -1,4 +1,5 @@
var _ = require('lodash/fp')
+let F = require('futil-js')
var esTwoLevel = require('./esTwoLevelAggregation').result
let { buildRegexQueryForWords } = require('../regex')
let { getField } = require('../fields')
@@ -8,17 +9,11 @@ module.exports = {
validContext: context => context.key_field && context.value_field,
async result(context, search, schema) {
let field = getField(schema, context.key_field, context.fieldMode)
- let orderPaths = _.reduce(
- (obj, metric) =>
- _.extend(
- {
- [metric]: {
- [`twoLevelAgg_${metric}.value`]: context.sortDir || 'desc',
- },
- },
- obj
- ),
- {},
+ let orderPaths = F.arrayToObject(
+ _.identity,
+ metric => ({
+ [`twoLevelAgg_${metric}.value`]: context.sortDir || 'desc',
+ }),
metrics
)
let x = await esTwoLevel(
|
Use F.arrayToObject per PR suggestions.
|
smartprocure_contexture-elasticsearch
|
train
|
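`F.arrayToObject(keyFn, valueFn, array)` from futil-js builds an object from an array with one function for keys and one for values, replacing the hand-rolled `_.reduce`. In Python the same shape is a dict comprehension; the field name below is illustrative:

```python
def array_to_object(key_fn, value_fn, xs):
    return {key_fn(x): value_fn(x) for x in xs}

metrics = ["avg", "max"]
aggs = array_to_object(
    lambda m: f"twoLevelAgg_{m}",
    lambda m: {m: {"field": "price"}},
    metrics,
)
# {'twoLevelAgg_avg': {'avg': {'field': 'price'}},
#  'twoLevelAgg_max': {'max': {'field': 'price'}}}
```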
4557515fd848da66310cf1078e3da3aefc400563
|
diff --git a/src/index.js b/src/index.js
index <HASH>..<HASH> 100644
--- a/src/index.js
+++ b/src/index.js
@@ -6,25 +6,26 @@ const SELECT_STATE = state => state.routing;
// Action creator
-function updatePath(path, noRouterUpdate) {
+function updatePath(path, avoidRouterUpdate) {
return {
type: UPDATE_PATH,
path: path,
- noRouterUpdate: noRouterUpdate
+ avoidRouterUpdate: !!avoidRouterUpdate
}
}
// Reducer
const initialState = typeof window === 'undefined' ? {} : {
- path: locationToString(window.location)
+ path: locationToString(window.location),
+ changeId: 1
};
function update(state=initialState, action) {
if(action.type === UPDATE_PATH) {
return Object.assign({}, state, {
path: action.path,
- noRouterUpdate: action.noRouterUpdate
+ changeId: state.changeId + (action.avoidRouterUpdate ? 0 : 1)
});
}
return state;
@@ -37,8 +38,7 @@ function locationToString(location) {
}
function syncReduxAndRouter(history, store, selectRouterState = SELECT_STATE) {
- let isTransitioning = false;
- let currentLocation;
+ let lastChangeId = 0;
const getRouterState = () => selectRouterState(store.getState());
if(!getRouterState()) {
@@ -49,31 +49,22 @@ function syncReduxAndRouter(history, store, selectRouterState = SELECT_STATE) {
}
const unsubscribeHistory = history.listen(location => {
- currentLocation = location;
- isTransitioning = false;
+ const routePath = locationToString(location);
- // Avoid dispatching an action if the store is already up-to-date,
- // even if `history` wouldn't do anything if the location is the same
- if(getRouterState().path !== locationToString(location)) {
- store.dispatch(updatePath(newLocation));
+ // Avoid dispatching an action if the store is already up-to-date
+ if(getRouterState().path !== routePath) {
+ store.dispatch(updatePath(routePath, { avoidRouterUpdate: true }));
}
});
const unsubscribeStore = store.subscribe(() => {
const routing = getRouterState();
- // Don't update the router if they are already in sync, or if
- // we've already triggered an update for this path. The latter can
- // happen if any state changes happen during transitions (for
- // example: updating app state during `listenBefore`).
- //
- // The `noRouterUpdate` flag can be set to avoid updating
- // altogether, which is useful for things like loading snapshots
- // or very special edge cases.
- if(!isTransitioning &&
- routing.path !== locationToString(currentLocation) &&
- !routing.noRouterUpdate) {
- isTransitioning = true;
+ // Only update the router once per `updatePath` call. This is
+ // indicated by the `changeId` state; when that number changes, we
+ // should call `pushState`.
+ if(lastChangeId !== routing.changeId) {
+ lastChangeId = routing.changeId;
history.pushState(null, routing.path);
}
});
|
simplify how we track when to tell the router to update
|
reactjs_react-router-redux
|
train
|
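The changeId counter is a compact "act once per explicit update" protocol: the reducer bumps a number only when the router should move, and the subscriber fires only when the number differs from the one it last handled. A store-agnostic Python sketch of that handshake (all names illustrative):

```python
state = {"path": "/", "change_id": 1}
last_change_id = 0

def update_path(path, avoid_router_update=False):
    state["path"] = path
    if not avoid_router_update:
        state["change_id"] += 1

def on_store_change(push_state):
    global last_change_id
    if last_change_id != state["change_id"]:
        last_change_id = state["change_id"]
        push_state(state["path"])

update_path("/about")                            # user action: bump counter
on_store_change(print)                           # pushes "/about"
update_path("/about", avoid_router_update=True)  # history-driven: no bump
on_store_change(print)                           # nothing pushed
```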
d6b46fcb037d1ad383fd538793ac06c739186143
|
diff --git a/jplephem/test.py b/jplephem/test.py
index <HASH>..<HASH> 100644
--- a/jplephem/test.py
+++ b/jplephem/test.py
@@ -1,6 +1,12 @@
-"""Test harness for checking jplephem against actual JPL computations."""
+"""Test harness for checking jplephem against actual JPL computations.
+This test can be invoked with a simple::
+
+ python -m jplephem.test
+
+"""
import numpy as np
+import sys
from .ephem import Ephemeris
def testpo(ephemeris, testpo_path):
@@ -90,4 +96,22 @@ def test_all():
if __name__ == '__main__':
- test_all()
+ try:
+ test_all()
+ except IOError:
+ raise
+ print >>sys.stderr, """
+Cannot find the JPL "testpo" files against which this test suite
+validates that the positions it generates are correct. To fetch them,
+run these four commands in your current working directory:
+
+ ftp://ssd.jpl.nasa.gov/pub/eph/planets/ascii/de405/
+ ftp://ssd.jpl.nasa.gov/pub/eph/planets/ascii/de406/
+ ftp://ssd.jpl.nasa.gov/pub/eph/planets/ascii/de422/
+ ftp://ssd.jpl.nasa.gov/pub/eph/planets/ascii/de423/
+
+These commands create a "ssd.jpl.nasa.gov" directory containing the
+necessary files. When you are done running the tests, simply remove the
+directory.
+"""
+ exit(1)
|
Added a message for how to download the test data
|
brandon-rhodes_python-jplephem
|
train
|
d70815725a37d415d43368b91ee1c0e573aafd7b
|
diff --git a/src/alchemist/__init__.py b/src/alchemist/__init__.py
index <HASH>..<HASH> 100644
--- a/src/alchemist/__init__.py
+++ b/src/alchemist/__init__.py
@@ -239,11 +239,10 @@ class Alchemist(flask.Flask):
self.metadata[name] = meta
# Update registry.
+ M = DeclarativeMeta
self.models[name].update(set(
- filter(lambda o: isinstance(o,
- DeclarativeMeta),
- cls._decl_class_registry.values())))
-
+ filter(lambda o: isinstance(o, M),
+ cls._decl_class_registry.values())))
# Clear modules.
modules, package = None, None
diff --git a/src/alchemist/commands/db.py b/src/alchemist/commands/db.py
index <HASH>..<HASH> 100644
--- a/src/alchemist/commands/db.py
+++ b/src/alchemist/commands/db.py
@@ -1,15 +1,10 @@
# -*- coding: utf-8 -*-
-import collections
-import sys
import contextlib
-from importlib import import_module
-import sqlalchemy as sa
+import datetime
from sqlalchemy import schema
from sqlalchemy.orm.query import Query
from flask.ext import script
-from termcolor import colored
from alchemist import application
-from alchemist.conf import settings
from .utils import print_command
@@ -38,10 +33,10 @@ def _render_statement(statement, bind=None):
return self.render_literal_value(bindparam.value, bindparam.type)
def render_literal_value(self, value, type_):
- if isinstance(value, long):
+ if isinstance(value, int):
return str(value)
- elif isinstance(value, (date, datetime)):
+ elif isinstance(value, (datetime.date, datetime.datetime)):
return "'%s'" % value
return super(LiteralCompiler, self).render_literal_value(
diff --git a/src/alchemist/commands/utils.py b/src/alchemist/commands/utils.py
index <HASH>..<HASH> 100644
--- a/src/alchemist/commands/utils.py
+++ b/src/alchemist/commands/utils.py
@@ -1,21 +1,13 @@
# -*- coding: utf-8 -*-
-import collections
import sys
-import contextlib
-from importlib import import_module
-import sqlalchemy as sa
-from sqlalchemy import schema
-from sqlalchemy.orm.query import Query
-from flask.ext import script
from termcolor import colored
-from alchemist import application
-from alchemist.conf import settings
# Alias some colors.
colored_info = lambda x: colored(x, 'white', attrs=['dark'])
colored_command = lambda x: colored(x, 'cyan')
colored_name = lambda x: colored(x, 'white')
+
def print_command(info, command, name, *more):
print(colored_info(info),
colored_command(command),
diff --git a/tests/alchemist/test_management.py b/tests/alchemist/test_management.py
index <HASH>..<HASH> 100644
--- a/tests/alchemist/test_management.py
+++ b/tests/alchemist/test_management.py
@@ -1,8 +1,6 @@
# -*- coding: utf-8 -*-
import sys
import os
-import io
-import contextlib
from alchemist import management
import py
|
Update to comply with flake8.
|
concordusapps_alchemist
|
train
|
417a6b5b27a546f6f68f9803f76f85d9f8c5a57e
|
diff --git a/lib/index.js b/lib/index.js
index <HASH>..<HASH> 100644
--- a/lib/index.js
+++ b/lib/index.js
@@ -49,7 +49,7 @@ O.scope = 'control';
// Public // {{{1
exports.browserConfig = true;
-exports.config = function(name, data, deps) {
+exports.config = function(name, val, deps) {
O.kind('./lirc', 'lirc', deps);
O.content('../content');
diff --git a/lib/lirc/index.js b/lib/lirc/index.js
index <HASH>..<HASH> 100644
--- a/lib/lirc/index.js
+++ b/lib/lirc/index.js
@@ -27,9 +27,9 @@ exports = O.append('node').exports;
* Fired when the lirc module receives a command
*
* @event receive
- * @param data {Object}
- * @param data.count {Number} Incremented number reached during long press
- * @param data.remote {String} Remote controller name
- * @param data.lirc {String} Original LIRC command name
- * @param data.key {String} Command name after pre-processing
+ * @param val {Object}
+ * @param val.count {Number} Incremented number reached during long press
+ * @param val.remote {String} Remote controller name
+ * @param val.lirc {String} Original LIRC command name
+ * @param val.key {String} Command name after pre-processing
*/
diff --git a/lib/lirc/node.js b/lib/lirc/node.js
index <HASH>..<HASH> 100644
--- a/lib/lirc/node.js
+++ b/lib/lirc/node.js
@@ -20,49 +20,49 @@ function onError() { // {{{2
setTimeout(connect.bind(null, this), 1000);
};
-function onData(data) { // {{{2
-// console.log('LIRC DATA', data);
-
- data = data.split(' ');
- data = {
- remote: data[3],
- lirc: data[2],
- count: parseInt(data[1], 16)
+function onData(val) { // {{{2
+// console.log('LIRC val', val);
+
+ val = val.split(' ');
+ val = {
+ remote: val[3],
+ lirc: val[2],
+ count: parseInt(val[1], 16)
};
-// console.log('LIRC DATA', data);
+// console.log('LIRC val', val);
if (
this.lastData &&
(this.lastData.count === 0) &&
- (this.lastData.remote === data.remote) &&
- (this.lastData.lirc === data.lirc) &&
+ (this.lastData.remote === val.remote) &&
+ (this.lastData.lirc === val.lirc) &&
(this.lastTime > (new Date().getTime() - 200))
) {
-// console.log('DOUBLE KEY SUPRESSED', data);
+// console.log('DOUBLE KEY SUPRESSED', val);
return;
}
- this.lastData = data;
+ this.lastData = val;
this.lastTime = new Date().getTime();
var match =
- data.lirc.match(/^KEY_(.*)/) ||
- data.lirc.match(/^BTN_(.*)/)
+ val.lirc.match(/^KEY_(.*)/) ||
+ val.lirc.match(/^BTN_(.*)/)
;
if (match) {
- data.key = match[1].toLowerCase();
+ val.key = match[1].toLowerCase();
} else {
- data.key = data.lirc.toLowerCase();
+ val.key = val.lirc.toLowerCase();
}
- this.emit('receive', data);
+ this.emit('receive', val);
return;
};
-function onEnd(data) { // {{{2
- O.log.unhandled('lirc end', data);
+function onEnd(val) { // {{{2
+ O.log.unhandled('lirc end', val);
delete this.socket;
@@ -72,7 +72,7 @@ function onEnd(data) { // {{{2
// }}}1
// Private {{{1
function connect(entry) { // {{{2
- entry.socket = Net.connect({path: entry.data.socket || '/var/run/lirc/lircd'});
+ entry.socket = Net.connect({path: entry.dval.socket || '/var/run/lirc/lircd'});
entry.socket.setEncoding('utf8');
entry.socket.on('error', onError.bind(entry));
entry.socket.on('data', onData.bind(entry));
|
Rename: "entry.data" => "entry.dval"
|
OpenSmartEnvironment_ose-lirc
|
train
|
ce8b720fe834668e082cd1b505ce674a8bab744d
|
diff --git a/lib/linkage/runner.rb b/lib/linkage/runner.rb
index <HASH>..<HASH> 100644
--- a/lib/linkage/runner.rb
+++ b/lib/linkage/runner.rb
@@ -5,11 +5,13 @@ module Linkage
# @param [Linkage::Configuration] config
# @param [String] uri Sequel-style database URI
+ # @param [Hash] options Sequel.connect options
# @see Dataset#link_with
# @see http://sequel.rubyforge.org/rdoc/files/doc/opening_databases_rdoc.html Sequel: Connecting to a database
- def initialize(config, uri)
+ def initialize(config, uri, options = {})
@config = config
@uri = uri
+ @options = options
@next_group_id = 1
@next_group_mutex = Mutex.new
end
@@ -22,7 +24,7 @@ module Linkage
protected
def database(&block)
- Sequel.connect(@uri, &block)
+ Sequel.connect(@uri, @options, &block)
end
def create_tables
@@ -37,6 +39,7 @@ module Linkage
column(:record_id, pk_type[:type], pk_type[:opts] || {})
Integer :group_id
Integer :dataset
+ index :group_id
end
end
end
diff --git a/test/unit/test_single_threaded_runner.rb b/test/unit/test_single_threaded_runner.rb
index <HASH>..<HASH> 100644
--- a/test/unit/test_single_threaded_runner.rb
+++ b/test/unit/test_single_threaded_runner.rb
@@ -8,7 +8,4 @@ class UnitTests::TestSingleThreadedRunner < Test::Unit::TestCase
test "responds to execute" do
assert_include Linkage::SingleThreadedRunner.public_instance_methods(false), :execute
end
-
- test "execute" do
- end
end
|
Add Sequel.connect options to Runner
|
coupler_linkage
|
train
|
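The shape of the change — accept an options hash at construction and forward it verbatim to the connect call — is easy to mirror. A minimal Python sketch with sqlite3 standing in for Sequel:

```python
import sqlite3

class Runner:
    def __init__(self, uri, options=None):
        self.uri = uri
        self.options = options or {}   # forwarded untouched to connect()

    def database(self):
        return sqlite3.connect(self.uri, **self.options)

conn = Runner(":memory:", {"timeout": 5.0}).database()
```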
757230b1052415a6698227b2b0faa974f5bf4dc4
|
diff --git a/Kwf/Assets/Components/Dependency/Abstract.php b/Kwf/Assets/Components/Dependency/Abstract.php
index <HASH>..<HASH> 100644
--- a/Kwf/Assets/Components/Dependency/Abstract.php
+++ b/Kwf/Assets/Components/Dependency/Abstract.php
@@ -85,6 +85,9 @@ class Kwf_Assets_Components_Dependency_Abstract extends Kwf_Assets_Dependency_Ab
foreach ($this->_componentDependencies as $dep) {
$dep->warmupCaches();
}
+ if (!$this->usesLanguage()) {
+ $this->getContentsPacked(null); //creates/updates cache file
+ }
}
public function usesLanguage()
@@ -100,23 +103,10 @@ class Kwf_Assets_Components_Dependency_Abstract extends Kwf_Assets_Dependency_Ab
public function getContentsPacked($language)
{
- $hash = '';
- foreach ($this->_componentDependencies as $dep) {
- $src = $dep->getContentsSource();
- if ($src['type'] == 'file') {
- $hash .= md5_file($src['file']);
- } else if ($src['type'] == 'contents') {
- $hash .= md5($src['contents']);
- } else {
- throw new Kwf_Exception_NotYetImplemented();
- }
- }
- $hash = md5($hash);
$cacheFile = "cache/componentassets/{$this->_componentClass}".
($this->usesLanguage() ? "-$language" : '').
- "-".Kwf_Config::getValue('application.uniquePrefix')."-$hash";
-
- if (file_exists($cacheFile)) {
+ "-".Kwf_Config::getValue('application.uniquePrefix')."-".str_replace('text/', '', $this->getMimeType());
+ if (file_exists($cacheFile) && filemtime($cacheFile) > $this->getMTime()) {
$ret = Kwf_SourceMaps_SourceMap::createFromInline(file_get_contents($cacheFile));
} else {
$ret = Kwf_SourceMaps_SourceMap::createEmptyMap('');
diff --git a/Kwf/Assets/Dependency/File/Scss.php b/Kwf/Assets/Dependency/File/Scss.php
index <HASH>..<HASH> 100644
--- a/Kwf/Assets/Dependency/File/Scss.php
+++ b/Kwf/Assets/Dependency/File/Scss.php
@@ -173,4 +173,18 @@ class Kwf_Assets_Dependency_File_Scss extends Kwf_Assets_Dependency_File_Css
}
return new Kwf_SourceMaps_SourceMap(file_get_contents($cacheFile.'.map'), file_get_contents($cacheFile));
}
+
+ public function getMTime()
+ {
+ $ret = parent::getMTime();
+ $cacheFile = $this->_getCacheFileName();
+ if (!file_exists("$cacheFile.sourcetimes")) {
+ $this->warmupCaches();
+ }
+ $sourceTimes = unserialize(file_get_contents("$cacheFile.sourcetimes"));
+ foreach ($sourceTimes as $t) {
+ if (file_exists($t['file'])) $ret = max($ret, filemtime($t['file']));
+ }
+ return $ret;
+ }
}
|
Correctly rebuild Component css when included scss file changed
|
koala-framework_koala-framework
|
train
|
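Replacing the per-request md5 of every source with an mtime comparison changes the cache test from "does a file with this content hash exist" to "is the cache file newer than every source". The core check, as a standalone Python sketch:

```python
import os

def cache_is_fresh(cache_file, source_files):
    """Usable only if the cache file is newer than all of its sources."""
    if not os.path.exists(cache_file):
        return False
    return os.path.getmtime(cache_file) > max(
        os.path.getmtime(s) for s in source_files
    )
```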
a301044d1ce4ced23fffe7f9021dfb0f51c296ac
|
diff --git a/packages/sketch/package.json b/packages/sketch/package.json
index <HASH>..<HASH> 100644
--- a/packages/sketch/package.json
+++ b/packages/sketch/package.json
@@ -20,6 +20,7 @@
"build": "cross-env NODE_ENV=production skpm-build",
"clean": "rimraf carbon-elements.sketchplugin",
"develop": "cross-env NODE_ENV=development skpm-build --watch",
+ "log": "skpm log -f",
"skpm:link": "skpm-link"
},
"dependencies": {
diff --git a/packages/sketch/src/commands/command.js b/packages/sketch/src/commands/command.js
index <HASH>..<HASH> 100644
--- a/packages/sketch/src/commands/command.js
+++ b/packages/sketch/src/commands/command.js
@@ -22,7 +22,7 @@ export function command(name, fn) {
sketch.UI.message('Done! 🎉');
} catch (error) {
console.log(error);
- sketch.UI.message('An error occured, please check the development logs');
+ sketch.UI.message('An error occurred, please check the development logs');
}
if (process.env.NODE_ENV === 'development') {
diff --git a/packages/sketch/src/commands/icons/generate.js b/packages/sketch/src/commands/icons/generate.js
index <HASH>..<HASH> 100644
--- a/packages/sketch/src/commands/icons/generate.js
+++ b/packages/sketch/src/commands/icons/generate.js
@@ -68,6 +68,11 @@ export function generate() {
const [_type, _category, _subcategory, name, size] = parts;
return name === icon && size === '32';
});
+
+ if (!symbol) {
+ throw new Error(`Unable to find symbol for icon ${icon}!`);
+ }
+
const instance = symbol.createNewInstance();
instance.frame.offset(ICON_X_OFFSET, ICON_Y_OFFSET);
|
fix(sketch): add undefined exception handler in icon generate script (#<I>)
* chore: add log script
* chore: fix typo
* fix(sketch): handle undefined exception
|
carbon-design-system_carbon-components
|
train
|
450d7c20af2e91fb8290fa2b8f7cf6612d7aed4c
|
diff --git a/bin/dna-to-aa.py b/bin/dna-to-aa.py
index <HASH>..<HASH> 100755
--- a/bin/dna-to-aa.py
+++ b/bin/dna-to-aa.py
@@ -15,6 +15,7 @@ import argparse
from Bio.Data.CodonTable import TranslationError
+from dark.reads import DNARead, RNARead
from dark.fasta import FastaReads
@@ -36,7 +37,13 @@ if __name__ == '__main__':
'this length will not be produced.')
args = parser.parse_args()
- reads = FastaReads(sys.stdin, args.type)
+
+ if args.type == 'dna':
+ readClass = DNARead
+ else:
+ readClass = RNARead
+
+ reads = FastaReads(sys.stdin, readClass)
write = sys.stdout.write
minORFLength = args.minORFLength
|
TRIVIAL: make dna-to-aa.py pass a read class to FastaReads
|
acorg_dark-matter
|
train
|
6608ea6f422a1902a1087eea1a83477a637157b5
|
diff --git a/raiden/tasks.py b/raiden/tasks.py
index <HASH>..<HASH> 100644
--- a/raiden/tasks.py
+++ b/raiden/tasks.py
@@ -144,7 +144,8 @@ class StartMediatedTransferTask(Task):
# someone down the line timedout / couldn't proceed
elif isinstance(response, (RefundTransfer, TransferTimeout)):
- # XXX
+ if hashlock in channel.partner_state.locked:
+ channel.partner_state.locked.remove(hashlock)
self.transfermanager.on_hashlock_result(hashlock, False)
# `target` received the MediatedTransfer
@@ -203,7 +204,6 @@ class StartMediatedTransferTask(Task):
if response.sender == next_hop:
if isinstance(response, (RefundTransfer, TransferTimeout)):
- # XXX
return response
else:
log.info('Partner {} sent an invalid message'.format(pex(next_hop)))
@@ -296,9 +296,11 @@ class MediateTransferTask(Task): # pylint: disable=too-many-instance-attributes
))
timeout = channel.create_timeouttransfer_for(transfer)
raiden.send(transfer.sender, timeout)
- self.transfermanager.on_hashlock_result(transfer.hashlock, False)
+ self.transfermanager.on_hashlock_result(transfer.lock.hashlock, False)
return
else:
+ # XXX: need to verify if extra action might be needed here, like
+ # removing locks
channel.register_transfer(response)
elif isinstance(response, Secret):
@@ -318,11 +320,18 @@ class MediateTransferTask(Task): # pylint: disable=too-many-instance-attributes
from_channel.register_transfer(refund_transfer)
raiden.send(from_address, refund_transfer)
- log.debug('REFUND MEDIATED TRANSFER from={} {}'.format(
+ log.debug('REFUND MEDIATED TRANSFER from={} to={}'.format(
pex(from_address),
pex(raiden.address),
))
+ # XXX: can we assume the hashlock will always be present on both EndStates?
+ hashlock = transfer.lock.hashlock
+ if hashlock in originating_channel.our_state.locked:
+ originating_channel.our_state.locked.remove(transfer.lock.hashlock)
+ if hashlock in originating_channel.partner_state.locked:
+ originating_channel.partner_state.locked.remove(transfer.lock.hashlock)
+
self.transfermanager.on_hashlock_result(transfer.lock.hashlock, False)
def send_and_wait_valid(self, raiden, path, mediated_transfer):
diff --git a/raiden/tests/test_transfer.py b/raiden/tests/test_transfer.py
index <HASH>..<HASH> 100644
--- a/raiden/tests/test_transfer.py
+++ b/raiden/tests/test_transfer.py
@@ -157,6 +157,10 @@ def test_mediated_transfer(raiden_network):
@pytest.mark.parametrize('asset', [sha3('cancel_transfer')[:20]])
@pytest.mark.parametrize('deposit', [100])
def test_cancel_transfer(raiden_chain, asset, deposit):
+
+ # TODO: use 4 nodes instead of 3 to check handling of MediatedTransfer as well
+ # as StartMediatedTransfer...
+
app0, app1, app2 = raiden_chain # pylint: disable=unbalanced-tuple-unpacking
messages = setup_messages_cb()
|
Fixes to cancel transfer by removing locked transfers on errors
|
raiden-network_raiden
|
train
|
1d345e53cd437c75a0f9c8b55d7e0660dfb1efc4
|
diff --git a/lib/sprockets_uglifier_with_source_maps/compressor.rb b/lib/sprockets_uglifier_with_source_maps/compressor.rb
index <HASH>..<HASH> 100644
--- a/lib/sprockets_uglifier_with_source_maps/compressor.rb
+++ b/lib/sprockets_uglifier_with_source_maps/compressor.rb
@@ -9,15 +9,16 @@ module SprocketsUglifierWithSM
def initialize(options = {})
# merge in any options passed in from our rails configuration - i wish
# rails actually did this by default :/
- options = options.merge(DEFAULTS).merge!(Rails.application.config.assets.uglifier.to_h)
- super options
+ @options = options.merge(DEFAULTS).merge!(Rails.application.config.assets.uglifier.to_h)
+ super @options
end
def call(input)
data = input.fetch(:data)
name = input.fetch(:name)
- compressed_data, sourcemap = @uglifier.compile_with_map(data)
+ uglifier = Sprockets::Autoload::Uglifier.new(@options)
+ compressed_data, sourcemap = uglifier.compile_with_map(data)
uncompressed_filename = File.join(Rails.application.config.assets.prefix, Rails.application.config.assets.uncompressed_prefix, "#{name}-#{digest(data)}.js")
uncompressed_url = filename_to_url uncompressed_filename
|
use local instance of Uglifier since sprockets initializes it differently now
|
AlexanderPavlenko_sprockets_uglifier_with_source_maps
|
train
|
dd7b4b98a3daae3bbf5f82ffee986a02d4a913c8
|
diff --git a/controller/common/src/Controller/Common/Common/Import/Xml/Processor/Lists/Standard.php b/controller/common/src/Controller/Common/Common/Import/Xml/Processor/Lists/Standard.php
index <HASH>..<HASH> 100644
--- a/controller/common/src/Controller/Common/Common/Import/Xml/Processor/Lists/Standard.php
+++ b/controller/common/src/Controller/Common/Common/Import/Xml/Processor/Lists/Standard.php
@@ -47,7 +47,7 @@ class Standard
{
foreach( $node->childNodes as $listNode )
{
- if( $listNode->nodeName === '#text' ) {
+ if( $listNode->nodeName[0] === '#' ) {
continue;
}
|
Skip comments in XML files too
|
aimeos_ai-controller-jobs
|
train
|
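Every non-element DOM node carries a synthetic name starting with '#' — '#text', '#comment', '#cdata-section' — so testing the first character skips all of them at once instead of only text nodes. Demonstrable with Python's minidom:

```python
from xml.dom import minidom

doc = minidom.parseString("<list>text<!-- a comment --><item/></list>")
for node in doc.documentElement.childNodes:
    if node.nodeName[0] == "#":
        continue            # skips '#text' and '#comment' alike
    print(node.nodeName)    # item
```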
d86f9d34a8d2dcb2841fdac77e461f725a3f2b01
|
diff --git a/src/InMemoryRepository.php b/src/InMemoryRepository.php
index <HASH>..<HASH> 100644
--- a/src/InMemoryRepository.php
+++ b/src/InMemoryRepository.php
@@ -126,11 +126,7 @@ class InMemoryRepository implements EditableRepository
$resources = array();
if (false !== strpos($query, '*')) {
- $resources = iterator_to_array(new GlobFilterIterator(
- $query,
- new ArrayIterator($this->resources),
- GlobFilterIterator::FILTER_KEY
- ));
+ $resources = $this->getGlobIterator($query);
} elseif (isset($this->resources[$query])) {
$resources = array($this->resources[$query]);
}
@@ -152,11 +148,7 @@ class InMemoryRepository implements EditableRepository
$query = Path::canonicalize($query);
if (false !== strpos($query, '*')) {
- $iterator = new GlobFilterIterator(
- $query,
- new ArrayIterator($this->resources),
- GlobFilterIterator::FILTER_KEY
- );
+ $iterator = $this->getGlobIterator($query);
$iterator->rewind();
return $iterator->valid();
@@ -245,11 +237,7 @@ class InMemoryRepository implements EditableRepository
$nbOfResources = count($this->resources);
if (false !== strpos($query, '*')) {
- $resourcesToRemove = new GlobFilterIterator(
- $query,
- new ArrayIterator($this->resources),
- GlobFilterIterator::FILTER_KEY
- );
+ $resourcesToRemove = $this->getGlobIterator($query);
} elseif (isset($this->resources[$query])) {
$resourcesToRemove[] = $this->resources[$query];
}
@@ -290,17 +278,7 @@ class InMemoryRepository implements EditableRepository
throw ResourceNotFoundException::forPath($path);
}
- $staticPrefix = rtrim($path, '/').'/';
- $regExp = '~^'.preg_quote($staticPrefix, '~').'[^/]+$~';
-
- $resources = iterator_to_array(new RegexFilterIterator(
- $regExp,
- $staticPrefix,
- new ArrayIterator($this->resources),
- RegexFilterIterator::FILTER_KEY
- ));
-
- return new ArrayResourceCollection($resources);
+ return new ArrayResourceCollection($this->getChildIterator($path));
}
/**
@@ -316,15 +294,7 @@ class InMemoryRepository implements EditableRepository
throw ResourceNotFoundException::forPath($path);
}
- $staticPrefix = rtrim($path, '/').'/';
- $regExp = '~^'.preg_quote($staticPrefix, '~').'[^/]+$~';
-
- $iterator = new RegexFilterIterator(
- $regExp,
- $staticPrefix,
- new ArrayIterator($this->resources),
- RegexFilterIterator::FILTER_KEY
- );
+ $iterator = $this->getChildIterator($path);
$iterator->rewind();
return $iterator->valid();
@@ -361,17 +331,16 @@ class InMemoryRepository implements EditableRepository
}
$basePath = '/' === $path ? $path : $path.'/';
+
+ // Read children before attaching the resource to this repository
$children = $resource->listChildren();
- // Attach resource to locator *after* calling listChildren(), because
- // this method usually depends on the previously attached repository
$resource->attachTo($this, $path);
- // Add the resource before adding nested resources, so that the
- // array stays sorted
+ // Add the resource before adding its children, so that the array
+ // stays sorted
$this->resources[$path] = $resource;
- // Recursively attach directory contents
foreach ($children as $name => $child) {
$this->addResource($basePath.$name, $child);
}
@@ -379,19 +348,57 @@ class InMemoryRepository implements EditableRepository
private function removeResource(Resource $resource)
{
+ $path = $resource->getPath();
+
// Ignore non-existing resources
- if (!isset($this->resources[$resource->getPath()])) {
+ if (!isset($this->resources[$path])) {
return;
}
// Recursively register directory contents
- foreach ($this->listChildren($resource->getPath()) as $child) {
+ foreach ($this->getChildIterator($path) as $child) {
$this->removeResource($child);
}
- unset($this->resources[$resource->getPath()]);
+ unset($this->resources[$path]);
// Detach from locator
$resource->detach($this);
}
+
+ /**
+ * Returns an iterator for the children of a path.
+ *
+ * @param string $path The resource path.
+ *
+ * @return RegexFilterIterator|Resource[] The iterator.
+ */
+ private function getChildIterator($path)
+ {
+ $staticPrefix = rtrim($path, '/').'/';
+ $regExp = '~^'.preg_quote($staticPrefix, '~').'[^/]+$~';
+
+ return new RegexFilterIterator(
+ $regExp,
+ $staticPrefix,
+ new ArrayIterator($this->resources),
+ RegexFilterIterator::FILTER_KEY
+ );
+ }
+
+ /**
+ * Returns an iterator for a glob.
+ *
+ * @param string $glob The glob.
+ *
+ * @return GlobFilterIterator|Resource[] The iterator.
+ */
+ protected function getGlobIterator($glob)
+ {
+ return new GlobFilterIterator(
+ $glob,
+ new ArrayIterator($this->resources),
+ GlobFilterIterator::FILTER_KEY
+ );
+ }
}
|
Simplified InMemoryRepository
|
puli_repository
|
train
|
d8399a4bf249109ccaecdbf6bd20413e64f50b32
|
diff --git a/ciscosparkapi/api/messages.py b/ciscosparkapi/api/messages.py
index <HASH>..<HASH> 100644
--- a/ciscosparkapi/api/messages.py
+++ b/ciscosparkapi/api/messages.py
@@ -236,7 +236,8 @@ class MessagesAPI(object):
if markdown:
post_data[u'markdown'] = utf8(markdown)
if files:
- post_data[u'files'] = utf8(files)
+ files = map(utf8, files)
+ post_data[u'files'] = files
# API request
json_obj = self.session.post('messages', json=post_data)
# Return a Message object created from the response JSON data
|
Fix Bug #8 - Message posting with File URL isn't working
Treat the files variable as a list (the correct type) and ensure its contents
are utf8 strings by applying the utf8 function via map.
|
CiscoDevNet_webexteamssdk
|
train
|
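The fix encodes each element rather than the list object. Worth noting when porting: on Python 3 `map` returns a lazy iterator, so a list comprehension is the more explicit spelling; `utf8` below is a stand-in for the library's helper:

```python
def utf8(s):  # stand-in for ciscosparkapi's utf8() helper
    return s.encode("utf-8") if isinstance(s, str) else s

files = ["https://example.com/report.pdf"]
post_data = {"files": [utf8(f) for f in files]}
```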
2851a4d06a3435548d53e7b4b1cf06d2bdb9786d
|
diff --git a/androguard/gui/mainwindow.py b/androguard/gui/mainwindow.py
index <HASH>..<HASH> 100644
--- a/androguard/gui/mainwindow.py
+++ b/androguard/gui/mainwindow.py
@@ -156,8 +156,6 @@ class MainWindow(QtGui.QMainWindow):
androconf.debug("curentTabChanged -> %d" % index)
if index == -1:
return # all tab closed
- sourcewin = self.central.widget(index)
- sourcewin.reload_java_sources()
def cleanCentral(self):
#TOFIX: Removes all the pages, but does not delete them.
@@ -201,6 +199,7 @@ class MainWindow(QtGui.QMainWindow):
sourcewin = self.getMeSourceWindowIfExists(path)
if not sourcewin:
sourcewin = SourceWindow(win=self, path=path)
+ sourcewin.reload_java_sources()
self.central.addTab(sourcewin, sourcewin.title)
self.central.setTabToolTip(self.central.indexOf(sourcewin), sourcewin.path)
if method:
diff --git a/androguard/gui/sourcewindow.py b/androguard/gui/sourcewindow.py
index <HASH>..<HASH> 100644
--- a/androguard/gui/sourcewindow.py
+++ b/androguard/gui/sourcewindow.py
@@ -66,11 +66,9 @@ class SourceWindow(QtGui.QTextBrowser):
os.makedirs(self.ospath)
except OSError:
pass
-
arg = class2func(self.path)
self.class_item = getattr(self.mainwin.d, arg)
- self.createActions()
self.setReadOnly(True)
self.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
@@ -156,27 +154,26 @@ class SourceWindow(QtGui.QTextBrowser):
self.actionXref()
elif key == QtCore.Qt.Key_I:
self.actionInfo()
+ elif key == QtCore.Qt.Key_R:
+ self.reload_java_sources()
- def createActions(self):
- self.xrefAct = QtGui.QAction("Xref from...", self,
+ def CustomContextMenuHandler(self, pos):
+ menu = QtGui.QMenu(self)
+ menu.addAction(QtGui.QAction("Xref from...", self,
statusTip="List the references where this element is used",
- triggered=self.actionXref)
- self.gotoAct = QtGui.QAction("Go to...", self,
+ triggered=self.actionXref))
+ menu.addAction(QtGui.QAction("Go to...", self,
statusTip="Go to element definition",
- triggered=self.actionGoto)
- self.renameAct = QtGui.QAction("Rename...", self,
+ triggered=self.actionGoto))
+ menu.addAction(QtGui.QAction("Rename...", self,
statusTip="Rename an element (class, method, ...)",
- triggered=self.actionRename)
- self.infoAct = QtGui.QAction("Info...", self,
+ triggered=self.actionRename))
+ menu.addAction(QtGui.QAction("Info...", self,
statusTip="Display info of an element (anything useful in the document)",
- triggered=self.actionInfo)
-
- def CustomContextMenuHandler(self, pos):
- menu = QtGui.QMenu(self)
- menu.addAction(self.xrefAct)
- menu.addAction(self.gotoAct)
- menu.addAction(self.renameAct)
- menu.addAction(self.infoAct)
+ triggered=self.actionInfo))
+ menu.addAction(QtGui.QAction("Reload sources...", self,
+ statusTip="Reload sources (needed when renaming changed other tabs)",
+ triggered=self.reload_java_sources))
menu.exec_(QtGui.QCursor.pos())
def actionXref(self):
|
androgui: Reload is manual for now (no more scrolling when changing tab)
|
androguard_androguard
|
train
|
1d03cde18b31378a999c4a617a6cd28190125124
|
diff --git a/lib/seahorse/client/net_http_handler.rb b/lib/seahorse/client/net_http_handler.rb
index <HASH>..<HASH> 100644
--- a/lib/seahorse/client/net_http_handler.rb
+++ b/lib/seahorse/client/net_http_handler.rb
@@ -49,7 +49,7 @@ module Seahorse
# extract HTTP status code and headers
response.status_code = resp.code.to_i
- response.headers.update(resp.to_hash)
+ response.headers.update(response_headers(resp))
# read the body in chunks
resp.read_body do |chunk|
@@ -104,6 +104,15 @@ module Seahorse
headers
end
+ # @param [Net::HTTP::Response] response
+ # @return [Hash<String, String>]
+ def response_headers(response)
+ response.to_hash.inject({}) do |headers, (k, v)|
+ headers[k] = v.first
+ headers
+ end
+ end
+
end
end
end
diff --git a/test/seahorse/client/net_http_handler_test.rb b/test/seahorse/client/net_http_handler_test.rb
index <HASH>..<HASH> 100644
--- a/test/seahorse/client/net_http_handler_test.rb
+++ b/test/seahorse/client/net_http_handler_test.rb
@@ -139,7 +139,7 @@ module Seahorse
resp.complete?.must_equal(true)
end
- describe 'endpoint' do
+ describe 'request endpoint' do
it 'makes a request against the given endpoint' do
@endpoint = Endpoint.new('http://foo.bar.com')
@@ -161,7 +161,7 @@ module Seahorse
end
- describe 'http method' do
+ describe 'request http method' do
it 'uses the http_request#http_method to make the request' do
http_request.http_method = 'POST'
@@ -179,7 +179,7 @@ module Seahorse
end
- describe 'headers' do
+ describe 'request headers' do
it 'passes along http_request#headers' do
http_request.headers['abc'] = 'xyz'
@@ -217,7 +217,7 @@ module Seahorse
end
- describe 'path' do
+ describe 'request path' do
it 'sends the request with the correct request uri' do
http_request.path = '/path'
@@ -233,7 +233,7 @@ module Seahorse
end
- describe 'body' do
+ describe 'request body' do
it 'sends the body' do
http_request.body = StringIO.new('request-body')
@@ -243,6 +243,24 @@ module Seahorse
end
+ describe 'response' do
+
+ it 'populates the status code' do
+ stub_request(:any, endpoint).to_return(status: 200)
+ make_request.http_response.status_code.must_equal(200)
+ end
+
+ it 'populates the headers' do
+ stub_request(:any, endpoint).to_return(headers: { foo: 'bar' })
+ make_request.http_response.headers['foo'].must_equal('bar')
+ end
+
+ it 'populates the response body' do
+ stub_request(:any, endpoint).to_return(body: 'response-body')
+ make_request.http_response.body.must_equal('response-body')
+ end
+
+ end
end
end
end
|
Added NetHttpHandler response parsing tests.
|
aws_aws-sdk-ruby
|
train
|
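`Net::HTTP#to_hash` returns every header value as an array, and the new `response_headers` helper keeps only the first element of each. The same collapse as a short Python sketch:

```python
raw = {"content-type": ["text/plain"], "x-foo": ["bar", "baz"]}
headers = {k: v[0] for k, v in raw.items()}
# {'content-type': 'text/plain', 'x-foo': 'bar'}
```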
ac88b666db62f7423a17a9974d61c6aa475e7794
|
diff --git a/lib/adhearsion/configuration.rb b/lib/adhearsion/configuration.rb
index <HASH>..<HASH> 100644
--- a/lib/adhearsion/configuration.rb
+++ b/lib/adhearsion/configuration.rb
@@ -25,7 +25,7 @@ module Adhearsion
level :info, :desc => <<-__
Supported levels (in increasing severity) -- :trace < :debug < :info < :warn < :error < :fatal
__
- outputters "log/adhearsion.log", :desc => <<-__
+ outputters ["log/adhearsion.log"], :desc => <<-__
An array of log outputters to use. The default is to log to stdout and log/adhearsion.log
__
formatter nil, :desc => <<-__
diff --git a/spec/adhearsion/initializer_spec.rb b/spec/adhearsion/initializer_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/adhearsion/initializer_spec.rb
+++ b/spec/adhearsion/initializer_spec.rb
@@ -77,7 +77,7 @@ describe Adhearsion::Initializer do
stub_behavior_for_initializer_with_no_path_changing_behavior do
flexmock(File).should_receive(:open).with(File.join(path, 'adhearsion.pid'), 'w', Proc).at_least.once
ahn = Adhearsion::Initializer.start path, :pid_file => true
- ahn.resolve_log_file_path.should == path + Adhearsion.config.platform.logging.outputters
+ ahn.resolve_log_file_path.should == path + Adhearsion.config.platform.logging.outputters[0]
end
end
|
outputters defined as an array instead of a String
it makes it easier to add a new outputter:
```
Adhearsion.config do |config|
config.platform.logging.outputters << "log/test.log"
end
```
|
adhearsion_adhearsion
|
train
|
133a74df22d6b38afd842a27eed8f7ffad3a1977
|
diff --git a/app/js/timer.js b/app/js/timer.js
index <HASH>..<HASH> 100644
--- a/app/js/timer.js
+++ b/app/js/timer.js
@@ -6,6 +6,7 @@ angular.module('timer', [])
scope: {
interval: '=interval',
startTimeAttr: '=startTime',
+ endTimeAttr: '=endTime',
countdownattr: '=countdown',
autoStart: '&autoStart'
},
@@ -24,6 +25,7 @@ angular.module('timer', [])
}
$scope.startTime = null;
+ $scope.endTime = null;
$scope.timeoutId = null;
$scope.countdown = $scope.countdownattr && parseInt($scope.countdownattr, 10) >= 0 ? parseInt($scope.countdownattr, 10) : undefined;
$scope.isRunning = false;
@@ -48,7 +50,17 @@ angular.module('timer', [])
$scope.start = $element[0].start = function () {
$scope.startTime = $scope.startTimeAttr ? new Date($scope.startTimeAttr) : new Date();
- $scope.countdown = $scope.countdownattr && parseInt($scope.countdownattr, 10) > 0 ? parseInt($scope.countdownattr, 10) : undefined;
+ $scope.endTime = $scope.endTimeAttr ? new Date($scope.endTimeAttr) : null;
+ if ($scope.endTime) {
+ var difference = $scope.endTime - new Date();
+ if (difference > 0) {
+ $scope.countdown = difference/1000|0;
+ } else {
+ $scope.countdown = 0;
+ }
+ } else {
+ $scope.countdown = $scope.countdownattr && parseInt($scope.countdownattr, 10) > 0 ? parseInt($scope.countdownattr, 10) : undefined;
+ }
resetTimeout();
tick();
};
@@ -93,7 +105,7 @@ angular.module('timer', [])
$scope.millis = new Date() - $scope.startTime;
var adjustment = $scope.millis % 1000;
- if ($scope.countdownattr) {
+ if ($scope.countdownattr || $scope.endTimeAttr) {
$scope.millis = $scope.countdown * 1000;
}
|
Add endTime attribute to timer directive.
|
siddii_angular-timer
|
train
|
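With an end time, the countdown is derived from the wall clock and clamped at zero once the end time has passed; `difference/1000|0` is JavaScript's truncating divide. The same computation in Python:

```python
from datetime import datetime, timedelta

def countdown_seconds(end_time):
    difference = (end_time - datetime.now()).total_seconds()
    return max(0, int(difference))   # never count below zero

print(countdown_seconds(datetime.now() + timedelta(minutes=5)))  # ~300
```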
8cb22efb833ce957a32080b4dbe3c8237f6433ec
|
diff --git a/example-plugins/homebridge-samplePlatform/index.js b/example-plugins/homebridge-samplePlatform/index.js
index <HASH>..<HASH> 100644
--- a/example-plugins/homebridge-samplePlatform/index.js
+++ b/example-plugins/homebridge-samplePlatform/index.js
@@ -74,6 +74,10 @@ SamplePlatform.prototype.configureAccessory = function(accessory) {
// accessory.updateReachability()
accessory.reachable = true;
+ accessory.on('identify', function() {
+ console.log("Identify!!!");
+ });
+
if (accessory.getService(Service.Lightbulb)) {
accessory.getService(Service.Lightbulb)
.getCharacteristic(Characteristic.On)
@@ -178,7 +182,9 @@ SamplePlatform.prototype.addAccessory = function(accessoryName) {
uuid = UUIDGen.generate(accessoryName);
var newAccessory = new Accessory(accessoryName, uuid);
-
+ newAccessory.on('identify', function() {
+ console.log("Identify!!!");
+ });
// Plugin can save context on accessory
// To help restore accessory in configureAccessory()
// newAccessory.context.something = "Something"
diff --git a/lib/platformAccessory.js b/lib/platformAccessory.js
index <HASH>..<HASH> 100644
--- a/lib/platformAccessory.js
+++ b/lib/platformAccessory.js
@@ -2,6 +2,8 @@ var uuid = require("hap-nodejs").uuid;
var Accessory = require("hap-nodejs").Accessory;
var Service = require("hap-nodejs").Service;
var Characteristic = require("hap-nodejs").Characteristic;
+var inherits = require('util').inherits;
+var EventEmitter = require('events').EventEmitter;
'use strict';
@@ -33,6 +35,8 @@ function PlatformAccessory(displayName, UUID, category) {
.setCharacteristic(Characteristic.SerialNumber, "Default-SerialNumber");
}
+inherits(PlatformAccessory, EventEmitter);
+
PlatformAccessory.prototype.addService = function(service) {
// service might be a constructor like `Service.AccessoryInformation` instead of an instance
// of Service. Coerce if necessary.
|
Add an example of using the "identify" event
|
nfarina_homebridge
|
train
|
3bbf1fab465fe0af3145ea94902e8855b6cff378
|
diff --git a/sharding-jdbc/sharding-jdbc-core/src/main/java/io/shardingsphere/shardingjdbc/jdbc/adapter/AbstractConnectionAdapter.java b/sharding-jdbc/sharding-jdbc-core/src/main/java/io/shardingsphere/shardingjdbc/jdbc/adapter/AbstractConnectionAdapter.java
index <HASH>..<HASH> 100644
--- a/sharding-jdbc/sharding-jdbc-core/src/main/java/io/shardingsphere/shardingjdbc/jdbc/adapter/AbstractConnectionAdapter.java
+++ b/sharding-jdbc/sharding-jdbc-core/src/main/java/io/shardingsphere/shardingjdbc/jdbc/adapter/AbstractConnectionAdapter.java
@@ -21,7 +21,6 @@ import com.google.common.base.Preconditions;
import com.google.common.collect.LinkedHashMultimap;
import com.google.common.collect.Multimap;
import io.shardingsphere.core.constant.ConnectionMode;
-import io.shardingsphere.core.exception.ShardingException;
import io.shardingsphere.core.hint.HintManagerHolder;
import io.shardingsphere.core.routing.router.masterslave.MasterVisitedManager;
import io.shardingsphere.shardingjdbc.jdbc.adapter.executor.ForceExecuteCallback;
@@ -160,7 +159,7 @@ public abstract class AbstractConnectionAdapter extends AbstractUnsupportedOpera
for (Connection each : result) {
each.close();
}
- throw new ShardingException(String.format("Could't get %d connections one time, partition succeed connection(%d) have released!", connectionSize, result.size()));
+ throw new SQLException(String.format("Could't get %d connections one time, partition succeed connection(%d) have released!", connectionSize, result.size()), ex);
}
}
return result;
diff --git a/sharding-proxy/src/main/java/io/shardingsphere/shardingproxy/backend/jdbc/datasource/JDBCBackendDataSource.java b/sharding-proxy/src/main/java/io/shardingsphere/shardingproxy/backend/jdbc/datasource/JDBCBackendDataSource.java
index <HASH>..<HASH> 100644
--- a/sharding-proxy/src/main/java/io/shardingsphere/shardingproxy/backend/jdbc/datasource/JDBCBackendDataSource.java
+++ b/sharding-proxy/src/main/java/io/shardingsphere/shardingproxy/backend/jdbc/datasource/JDBCBackendDataSource.java
@@ -130,11 +130,11 @@ public final class JDBCBackendDataSource implements BackendDataSource, AutoClose
for (int i = 0; i < connectionSize; i++) {
try {
result.add(dataSource.getConnection());
- } catch (final SQLException ignored) {
+ } catch (final SQLException ex) {
for (Connection each : result) {
each.close();
}
- throw new ShardingException(String.format("Could't get %d connections one time, partition succeed connection(%d) have released!", connectionSize, result.size()));
+ throw new SQLException(String.format("Could't get %d connections one time, partition succeed connection(%d) have released!", connectionSize, result.size()), ex);
}
}
return result;
diff --git a/sharding-proxy/src/test/java/io/shardingsphere/shardingproxy/backend/jdbc/datasource/JDBCBackendDataSourceTest.java b/sharding-proxy/src/test/java/io/shardingsphere/shardingproxy/backend/jdbc/datasource/JDBCBackendDataSourceTest.java
index <HASH>..<HASH> 100644
--- a/sharding-proxy/src/test/java/io/shardingsphere/shardingproxy/backend/jdbc/datasource/JDBCBackendDataSourceTest.java
+++ b/sharding-proxy/src/test/java/io/shardingsphere/shardingproxy/backend/jdbc/datasource/JDBCBackendDataSourceTest.java
@@ -18,7 +18,6 @@
package io.shardingsphere.shardingproxy.backend.jdbc.datasource;
import io.shardingsphere.core.constant.ConnectionMode;
-import io.shardingsphere.core.exception.ShardingException;
import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
import org.junit.Before;
@@ -39,7 +38,6 @@ import java.util.concurrent.Future;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.instanceOf;
-import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
@@ -80,13 +78,9 @@ public class JDBCBackendDataSourceTest {
assertEquals(5, actual.size());
}
- @Test
+ @Test(expected = SQLException.class)
public void assertGetConnectionsFailed() throws SQLException {
- try {
- jdbcBackendDataSource.getConnections(ConnectionMode.MEMORY_STRICTLY, "ds_1", 6);
- } catch (final ShardingException ex) {
- assertThat(ex.getMessage(), is("Could't get 6 connections one time, partition succeed connection(5) have released!"));
- }
+ jdbcBackendDataSource.getConnections(ConnectionMode.MEMORY_STRICTLY, "ds_1", 6);
}
@Test
|
for #<I>, throw SQLException instead of ShardingException
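A hedged sketch of the caller-side change: failures now surface as a checked SQLException with the original cause chained, instead of an unchecked ShardingException (variable name taken from the test above):
```
try {
    jdbcBackendDataSource.getConnections(ConnectionMode.MEMORY_STRICTLY, "ds_1", 6);
} catch (final SQLException ex) {
    // the SQLException that failed the batch is chained as the cause
    Throwable cause = ex.getCause();
}
```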
|
apache_incubator-shardingsphere
|
train
|
a8562bd7148131d3ec4b732096cdf7b210433e1e
|
diff --git a/public/js/admin/rcm-ckeditor.js b/public/js/admin/rcm-ckeditor.js
index <HASH>..<HASH> 100644
--- a/public/js/admin/rcm-ckeditor.js
+++ b/public/js/admin/rcm-ckeditor.js
@@ -142,20 +142,7 @@ function RcmCkEditor(config) {
* @return {*}
*/
me.getHtml5EditorData = function(editor) {
-
- var returnData = {};
-
- var tempData = $(editor).html();
-
- returnData.html = $.trim(tempData);
-
- if (returnData == undefined || returnData == '') {
- return false;
- }
-
- returnData.assets = me.getAssets(returnData.html);
-
- return returnData;
+ return me.getRichEditorData(editor);
};
me.getAssets = function (htmlToCheck) {
|
Fixed save on html5 edit areas.
|
reliv_Rcm
|
train
|
cbc48e6930409e523d8c8039fa9ccaa5c44df53d
|
diff --git a/lib/db/cortex.php b/lib/db/cortex.php
index <HASH>..<HASH> 100644
--- a/lib/db/cortex.php
+++ b/lib/db/cortex.php
@@ -736,7 +736,7 @@ class Cortex extends Cursor {
$has_filter=$this->mergeFilter([$has_filter,
[$this->rel($key)->getTable().'.'.$fromConf[1].'='.$this->getTable().'.'.$id]]);
$result = $this->_refSubQuery($key,$has_filter,$has_options);
- $addToFilter = ['exists('.$result[0].')']+$result[1];
+ $addToFilter = array_merge(['exists('.$result[0].')'],$result[1]);
}
elseif ($result = $this->_hasRefsIn($key,$has_filter,$has_options,$ttl))
$addToFilter = array($id.' IN ?', $result);
|
Fixes a bug in has("something IN ?", array)
'+' between arrays drops right-hand elements whose keys already exist on the left, even numeric ones; 'array_merge' renumbers numeric keys and keeps both.
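A standalone demonstration of that PHP semantics difference (illustrative values):
```
<?php
$a = ['exists(subquery)'];  // numeric key 0
$b = ['bind value'];        // also numeric key 0

var_dump($a + $b);
// [0 => 'exists(subquery)']: the right-hand element is dropped
var_dump(array_merge($a, $b));
// [0 => 'exists(subquery)', 1 => 'bind value']: both kept, keys renumbered
```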
|
ikkez_f3-cortex
|
train
|
efe962f01b2f0de0126e310799153ab53a0fdf60
|
diff --git a/lib/nagios_analyzer/section.rb b/lib/nagios_analyzer/section.rb
index <HASH>..<HASH> 100644
--- a/lib/nagios_analyzer/section.rb
+++ b/lib/nagios_analyzer/section.rb
@@ -16,5 +16,16 @@ module NagiosAnalyzer
self[:status] = (self[:current_state] == NagiosAnalyzer::Status::STATE_OK ? "OK" : "CRITICAL")
end
end
+
+ def <=>(other)
+ self.sort_array <=> other.sort_array
+ end
+
+ def sort_array
+ [ (self[:type] == "servicestatus" ? 1 : 0),
+ Status::STATES_ORDER[self[:current_state]].to_i,
+ self[:host_name],
+ self[:service_description].to_s ]
+ end
end
end
diff --git a/lib/nagios_analyzer/status.rb b/lib/nagios_analyzer/status.rb
index <HASH>..<HASH> 100644
--- a/lib/nagios_analyzer/status.rb
+++ b/lib/nagios_analyzer/status.rb
@@ -37,17 +37,17 @@ module NagiosAnalyzer
def host_items
@host_items ||= sections.map do |s|
Section.new(s) if s.start_with?("hoststatus") && in_scope?(s)
- end.compact
+ end.compact.sort
end
def service_items
@service_items ||= sections.map do |s|
Section.new(s) if s.start_with?("servicestatus") && in_scope?(s)
- end.compact
+ end.compact.sort
end
def items
- @items ||= host_items + service_items
+ @items ||= (host_items + service_items).sort
end
def in_scope?(section)
diff --git a/spec/nagios_analyzer_section_spec.rb b/spec/nagios_analyzer_section_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/nagios_analyzer_section_spec.rb
+++ b/spec/nagios_analyzer_section_spec.rb
@@ -33,4 +33,41 @@ describe NagiosAnalyzer::Section do
Section.new("hoststatus {\ncurrent_state=0\n}")[:status].should == "OK"
Section.new("hoststatus {\ncurrent_state=42\n}")[:status].should == "CRITICAL"
end
+
+ context "#sort" do
+ it "places servicestatus'es after hoststatus'es" do
+ a = Section.new("servicestatus {\ncurrent_state=0\n}")
+ b = Section.new("hoststatus {\ncurrent_state=0\n}")
+ [a,b].sort.should == [b,a]
+ end
+
+ it "places critical before unknown before warning before pending before dependent before ok" do
+ host = Section.new("hoststatus {\ncurrent_state=0\n}")
+ critical = Section.new("servicestatus {\ncurrent_state=2\n}")
+ unknown = Section.new("servicestatus {\ncurrent_state=3\n}")
+ warning = Section.new("servicestatus {\ncurrent_state=1\n}")
+ dependent = Section.new("servicestatus {\ncurrent_state=4\n}")
+ ok = Section.new("servicestatus {\ncurrent_state=0\n}")
+ [ok, unknown, dependent, critical, host, warning].sort.should == [host, critical, unknown, warning, dependent, ok]
+ end
+
+ it "sorts by host_name" do
+ a = Section.new("hoststatus {\ncurrent_state=0\nhost_name=a\n}")
+ b = Section.new("hoststatus {\ncurrent_state=0\nhost_name=b\n}")
+ [b,a].sort.should == [a,b]
+ end
+
+ it "sorts by service_description" do
+ a = Section.new("hoststatus {\ncurrent_state=0\n}")
+ b = Section.new("servicestatus {\ncurrent_state=0\nservice_description=b\n}")
+ c = Section.new("servicestatus {\ncurrent_state=0\nservice_description=c\n}")
+ [c,b,a].sort.should == [a,b,c]
+ end
+
+ it "has no problem even with missing fields (hostname don't have service_description)" do
+ a = Section.new("hoststatus {\ncurrent_state=0\n}")
+ b = Section.new("hoststatus {\ncurrent_state=0\n}")
+ [a,b].sort.should == [a,b]
+ end
+ end
end
|
Added sorting to Section's arrays
|
jbbarth_nagios_analyzer
|
train
|
019ac6a39a9c3d44fd51352b0cbf4252b012bd27
|
diff --git a/src/SerializerClassLoader.php b/src/SerializerClassLoader.php
index <HASH>..<HASH> 100644
--- a/src/SerializerClassLoader.php
+++ b/src/SerializerClassLoader.php
@@ -72,6 +72,10 @@ class SerializerClassLoader
{
$classMetadata = $this->metadataFactory->getMetadataForClass(get_class($object));
+ if (null === $classMetadata) {
+ throw new \RuntimeException('No mapping file was found for class ' . get_class($object) . '. Did you configure the correct paths for serializer?');
+ }
+
$fqn = $this->getClassName($classMetadata);
if (isset($this->instances[$fqn])) {
|
Added an exception when metadata is not found
|
tsantos84_serializer
|
train
|
153998fa3fd85614e9677132608427c8da8c8cc9
|
diff --git a/tests/test_test_if_else.py b/tests/test_test_if_else.py
index <HASH>..<HASH> 100644
--- a/tests/test_test_if_else.py
+++ b/tests/test_test_if_else.py
@@ -345,6 +345,27 @@ y = 5 if True else 4
self.data["DC_CODE"] = "def f(): return 'wrong' if True else 3"
sct_payload = helper.run(self.data)
self.assertFalse(sct_payload['correct'])
+class TestIfExpListComp(unittest.TestCase):
+ def setUp(self):
+ self.data = {
+ "DC_SOLUTION": """[i**2 if i> 5 else 0 for i in range(0,10)]""",
+ "DC_SCT": """
+test_list_comp(body = lambda: test_if_exp(
+ body=lambda: test_student_typed(r"\s*i\*\*2\s*"),
+ test= lambda: test_expression_result(context_vals = [6]),
+ orelse= lambda: test_expression_result())) """
+ }
+
+ def test_pass(self):
+ self.data["DC_CODE"] = self.data["DC_SOLUTION"]
+ sct_payload = helper.run(self.data)
+ self.assertTrue(sct_payload['correct'])
+
+ def test_fail_body(self):
+ self.data["DC_CODE"] = """[i**3 if i> 5 else 0 for i in range(0,10)]"""
+ sct_payload = helper.run(self.data)
+ self.assertFalse(sct_payload['correct'])
+
if __name__ == "__main__":
unittest.main()
|
test_if_exp unit tests w/ list comps
|
datacamp_pythonwhat
|
train
|
5b13f29a0fa7ea46aceaf57710ddb9657bf9855f
|
diff --git a/src/adLDAP/Connections/LDAP.php b/src/adLDAP/Connections/LDAP.php
index <HASH>..<HASH> 100644
--- a/src/adLDAP/Connections/LDAP.php
+++ b/src/adLDAP/Connections/LDAP.php
@@ -305,6 +305,18 @@ class LDAP implements ConnectionInterface
}
/**
+ * Adds entries to the current LDAP directory.
+ *
+ * @param $dn
+ * @param $entry
+ * @return bool
+ */
+ public function add($dn, $entry)
+ {
+ return @ldap_add($this->getConnection(), $dn, $entry);
+ }
+
+ /**
* Closes the current LDAP connection if
* it exists.
*
|
Added ldap_add method to ldap connection
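A hedged usage sketch; the DN and entry values are hypothetical:
```
// $ldap is a connected LDAP instance; the values below are hypothetical
$dn = 'cn=John Doe,dc=example,dc=com';
$entry = [
    'cn'          => 'John Doe',
    'objectclass' => ['top', 'person'],
];
$created = $ldap->add($dn, $entry); // bool; errors are suppressed via @ldap_add
```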
|
Adldap2_Adldap2
|
train
|
e1fa59d675d1cbd088333dc3f1776a74483d7a7b
|
diff --git a/Grido/DataSources/DibiFluent.php b/Grido/DataSources/DibiFluent.php
index <HASH>..<HASH> 100644
--- a/Grido/DataSources/DibiFluent.php
+++ b/Grido/DataSources/DibiFluent.php
@@ -46,7 +46,7 @@ class DibiFluent extends \Nette\Object implements IDataSource
public function getCount()
{
$fluent = clone $this->fluent;
- return $fluent->removeClause('SELECT')->select('COUNT(*)')->fetchSingle();
+ return $fluent->removeClause('ORDER BY')->removeClause('SELECT')->select('COUNT(*)')->fetchSingle();
}
/**
|
DataSources\DibiFluent: Fixed getCount() for PostgreSQL by removing the ORDER BY clause before counting
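The failure mode, sketched as plain SQL (table and column names are hypothetical):
```
-- before the fix, the ORDER BY from the original fluent survived:
SELECT COUNT(*) FROM articles ORDER BY title;
-- PostgreSQL rejects this because "title" appears in neither the
-- select list nor a GROUP BY clause

-- with the clause removed, the count works everywhere:
SELECT COUNT(*) FROM articles;
```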
|
o5_grido
|
train
|
c21af84599e728194ade4f4b0fb90cbfe0955736
|
diff --git a/impl-servlet/src/main/java/com/ocpsoft/rewrite/servlet/config/rule/Join.java b/impl-servlet/src/main/java/com/ocpsoft/rewrite/servlet/config/rule/Join.java
index <HASH>..<HASH> 100644
--- a/impl-servlet/src/main/java/com/ocpsoft/rewrite/servlet/config/rule/Join.java
+++ b/impl-servlet/src/main/java/com/ocpsoft/rewrite/servlet/config/rule/Join.java
@@ -25,8 +25,6 @@ import com.ocpsoft.rewrite.config.ConditionBuilder;
import com.ocpsoft.rewrite.config.Operation;
import com.ocpsoft.rewrite.config.Rule;
import com.ocpsoft.rewrite.context.EvaluationContext;
-import com.ocpsoft.rewrite.event.InboundRewrite;
-import com.ocpsoft.rewrite.event.OutboundRewrite;
import com.ocpsoft.rewrite.event.Rewrite;
import com.ocpsoft.rewrite.param.Parameter;
import com.ocpsoft.rewrite.param.Parameterized;
@@ -39,6 +37,7 @@ import com.ocpsoft.rewrite.servlet.config.Substitute;
import com.ocpsoft.rewrite.servlet.config.rule.Join.JoinParameterBuilder;
import com.ocpsoft.rewrite.servlet.http.event.HttpInboundServletRewrite;
import com.ocpsoft.rewrite.servlet.http.event.HttpOutboundServletRewrite;
+import com.ocpsoft.rewrite.servlet.util.QueryStringBuilder;
/**
* {@link Rule} that creates a bi-directional rewrite rule between an externally facing URL and an internal server
@@ -126,13 +125,10 @@ public class Join implements Rule, Parameterized<JoinParameterBuilder, String>
}
else if (event instanceof HttpOutboundServletRewrite)
{
- List<String> nonQueryParameters = resourcePath.getPathExpression().getParameterNames();
-
- List<String> queryParameters = path.getPathExpression().getParameterNames();
- queryParameters.removeAll(nonQueryParameters);
+ List<String> parameters = getPathRequestParameters();
ConditionBuilder outbound = resourcePath;
- for (String name : queryParameters)
+ for (String name : parameters)
{
outbound = outbound.and(QueryString.parameterExists(name));
}
@@ -143,18 +139,43 @@ public class Join implements Rule, Parameterized<JoinParameterBuilder, String>
return false;
}
+ private List<String> getPathRequestParameters()
+ {
+ List<String> nonQueryParameters = resourcePath.getPathExpression().getParameterNames();
+
+ List<String> queryParameters = path.getPathExpression().getParameterNames();
+ queryParameters.removeAll(nonQueryParameters);
+ return queryParameters;
+ }
+
@Override
public void perform(final Rewrite event, final EvaluationContext context)
{
- if (event instanceof InboundRewrite)
+ if (event instanceof HttpInboundServletRewrite)
{
saveCurrentJoin(((HttpInboundServletRewrite) event).getRequest());
Forward.to(resource).perform(event, context);
}
- else if (event instanceof OutboundRewrite)
+ else if (event instanceof HttpOutboundServletRewrite)
{
- Substitute.with(pattern).perform(event, context);
+ List<String> parameters = getPathRequestParameters();
+
+ String outboundURL = ((HttpOutboundServletRewrite) event).getOutboundURL();
+ QueryStringBuilder query = QueryStringBuilder.begin();
+ if (outboundURL.contains("?"))
+ {
+ query.addParameters(outboundURL);
+ for (String string : parameters) {
+ List<String> values = query.removeParameter(string);
+ if (values.size() > 1)
+ {
+ query.addParameter(string, values.subList(1, values.size()).toArray(new String[] {}));
+ }
+ }
+ }
+
+ Substitute.with(pattern + query.toQueryString()).perform(event, context);
}
}
diff --git a/impl-servlet/src/main/java/com/ocpsoft/rewrite/servlet/util/QueryStringBuilder.java b/impl-servlet/src/main/java/com/ocpsoft/rewrite/servlet/util/QueryStringBuilder.java
index <HASH>..<HASH> 100644
--- a/impl-servlet/src/main/java/com/ocpsoft/rewrite/servlet/util/QueryStringBuilder.java
+++ b/impl-servlet/src/main/java/com/ocpsoft/rewrite/servlet/util/QueryStringBuilder.java
@@ -125,6 +125,13 @@ public class QueryStringBuilder
return this;
}
+ public void addParameter(String name, String... values)
+ {
+ Map<String, String[]> parameter = new LinkedHashMap<String, String[]>();
+ parameter.put(name, values);
+ addParameterArrays(parameter);
+ }
+
/**
* Add parameters from the given map of name=value pairs. For parameters with more than one value, each value will be
* appended using the same name. If a parameter already exists, append new values to the existing list of values for
@@ -351,6 +358,7 @@ public class QueryStringBuilder
private class NullEncodingHandler implements EncodingHandler
{
+ @Override
public String encode(final String value)
{
return value;
@@ -359,6 +367,7 @@ public class QueryStringBuilder
private class QSDecoder implements EncodingHandler
{
+ @Override
public String encode(final String value)
{
try {
@@ -377,6 +386,7 @@ public class QueryStringBuilder
private class QSEncoder implements EncodingHandler
{
+ @Override
public String encode(final String value)
{
try {
|
Join now respects query strings in outbound URLs
|
ocpsoft_rewrite
|
train
|
4e4bbf5a5dfcef8824a948eb1aa066dd6100fd4f
|
diff --git a/lib/erector/rails/rails_form_builder.rb b/lib/erector/rails/rails_form_builder.rb
index <HASH>..<HASH> 100644
--- a/lib/erector/rails/rails_form_builder.rb
+++ b/lib/erector/rails/rails_form_builder.rb
@@ -4,7 +4,7 @@ module Erector
def initialize(object_name, object, template, options, proc)
@template = template
- @parent = ActionView::Helpers::FormBuilder.new(object_name, object, template, options, proc)
+ @parent = ActionView::Base.default_form_builder.new(object_name, object, template, options, proc)
end
def method_missing(method_name, *args, &block)
|
Make sure that we use whatever the default form builder is for the app, in case it has been overridden by the user in the Rails environment.rb, instead of just assuming it is always the ActionView form builder
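For context, a hedged sketch of the kind of app-level override this now respects (the builder class is hypothetical):
```
# config/environment.rb (hypothetical override)
ActionView::Base.default_form_builder = MyCustomFormBuilder
```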
|
erector_erector
|
train
|
3fa1b9df7556cecd74f0d75bcc99ec5523d7278e
|
diff --git a/request.js b/request.js
index <HASH>..<HASH> 100644
--- a/request.js
+++ b/request.js
@@ -29,8 +29,10 @@ function TChannelRequest(channel, options) {
self.channel = channel;
self.options = options;
self.outReqs = [];
+ self.timeout = self.options.timeout;
self.start = 0;
self.end = 0;
+ self.elapsed = 0;
self.headers = self.options.headers || {}; // so that as-foo can punch req.headers.X
self.options.headers = self.headers; // for passing to peer.request(opts) later
@@ -64,11 +66,34 @@ TChannelRequest.prototype.send = function send(arg1, arg2, arg3, callback) {
}
};
+TChannelRequest.prototype.resend = function resend() {
+ var self = this;
+ var outReq = self.makeOutRequest();
+ outReq.send(self.arg1, self.arg2, self.arg3, outReqRedone);
+ function outReqRedone(err, res, arg2, arg3) {
+ self.onReqDone(err, res, arg2, arg3);
+ }
+};
+
TChannelRequest.prototype.onReqDone = function onReqDone(err, res, arg2, arg3) {
var self = this;
var now = self.channel.timers.now();
- self.end = now;
- self._callback(err, res, arg2, arg3);
+ self.elapsed = now - self.start;
+ if (self.elapsed < self.timeout &&
+ self.shouldRetry(err, res, arg2, arg3)) {
+ process.nextTick(deferResend);
+ } else {
+ self.end = now;
+ self._callback(err, res, arg2, arg3);
+ }
+ function deferResend() {
+ self.resend();
+ }
+};
+
+TChannelRequest.prototype.shouldRetry = function shouldRetry(err, res, arg2, arg3) {
+ var self = this;
+ return false;
};
module.exports = TChannelRequest;
|
TChannelRequest: add dark retry path
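shouldRetry defaults to false, which keeps the path dark; a hedged sketch of how an instance could opt in later:
```
// hypothetical opt-in; the shipped default always returns false
req.shouldRetry = function shouldRetry(err, res, arg2, arg3) {
  return Boolean(err); // e.g. retry only when the attempt errored
};
```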
|
uber_tchannel-node
|
train
|
31cdd7a3dab4fb2bb2a61b9c80d2e7c9fee36b2b
|
diff --git a/jujupy.py b/jujupy.py
index <HASH>..<HASH> 100644
--- a/jujupy.py
+++ b/jujupy.py
@@ -663,7 +663,7 @@ class EnvJujuClient26(EnvJujuClient):
"""
if self._use_jes:
return
- if self.is_jes_enabled() and self.get_jes_command() == 'controller':
+ if self.is_jes_enabled():
raise JESByDefault()
self._use_jes = True
if not self.is_jes_enabled():
diff --git a/tests/test_jujupy.py b/tests/test_jujupy.py
index <HASH>..<HASH> 100644
--- a/tests/test_jujupy.py
+++ b/tests/test_jujupy.py
@@ -122,8 +122,7 @@ class TestEnvJujuClient26(ClientTest, CloudSigmaTest):
client.enable_jes()
self.assertFalse(client._use_jes)
assert_juju_call(
- self, po_mock, client, ('juju', '--show-log', 'help', 'commands'),
- call_index=1)
+ self, po_mock, client, ('juju', '--show-log', 'help', 'commands'))
def test_enable_jes_unsupported(self):
client = self.client_class(
|
Do not check for controller in enable_jes.
|
juju_juju
|
train
|
bf51dbdafbc0d9586a6898ee0b89329116d30507
|
diff --git a/src/georegression/geometry/GeometryMath_F64.java b/src/georegression/geometry/GeometryMath_F64.java
index <HASH>..<HASH> 100644
--- a/src/georegression/geometry/GeometryMath_F64.java
+++ b/src/georegression/geometry/GeometryMath_F64.java
@@ -27,10 +27,11 @@ import org.ejml.data.DenseMatrix64F;
/**
- * Math operations that can be applied to geometric primatives.
+ * Math operations that can be applied to geometric primitives.
*
* @author Peter Abeles
*/
+// TODO rename to PerspectiveMath?
// todo separate off fucntions that are in homogeneous coordinates into their own class?
// alternatively indicate by the function name?
// todo make sure all functions have unit tests
diff --git a/src/georegression/geometry/UtilTrig_F64.java b/src/georegression/geometry/UtilTrig_F64.java
index <HASH>..<HASH> 100644
--- a/src/georegression/geometry/UtilTrig_F64.java
+++ b/src/georegression/geometry/UtilTrig_F64.java
@@ -65,6 +65,12 @@ public class UtilTrig_F64 {
return a.x*b.x + a.y*b.y + a.z*b.z;
}
+ public static double acuteAngle( GeoTuple3D_F64 a , GeoTuple3D_F64 b ) {
+ double dot = a.x*b.x + a.y*b.y + a.z*b.z;
+ double bottom = a.norm()*b.norm();
+ return Math.acos(dot/bottom);
+ }
+
public static double acuteAngle( double vx_a, double vy_a,
double vx_b, double vy_b ) {
double r_a = Math.sqrt( vx_a * vx_a + vy_a * vy_a );
|
- Added acute angle and comments
|
lessthanoptimal_GeoRegression
|
train
|
fce87db5c3b5750213652e24f2941a92116adbbb
|
diff --git a/src/FeatureflowClient.js b/src/FeatureflowClient.js
index <HASH>..<HASH> 100644
--- a/src/FeatureflowClient.js
+++ b/src/FeatureflowClient.js
@@ -102,8 +102,6 @@ export default class FeatureflowClient{
var d = new Date();
//these could be event or session attributes ie not persisted directly to user but added to a seperate attributes map
const featureflowAttributes = {
- 'featureflow.date': new Date(),
- 'featureflow.hourofday': d.getHours()
};
const attributes = {
...user.attributes,
|
Removed auto-created attributes
To improve performance and simplify
|
featureflow_featureflow-javascript-sdk
|
train
|
6da399beed21dddd2bf81a5f6094e1486c74bcab
|
diff --git a/python_modules/dagster/dagster/core/definitions/decorators/repository.py b/python_modules/dagster/dagster/core/definitions/decorators/repository.py
index <HASH>..<HASH> 100644
--- a/python_modules/dagster/dagster/core/definitions/decorators/repository.py
+++ b/python_modules/dagster/dagster/core/definitions/decorators/repository.py
@@ -41,7 +41,7 @@ class _Repository(object):
or isinstance(definition, PartitionSetDefinition)
or isinstance(definition, ScheduleDefinition)
):
- bad_definitions.append(i, type(definition))
+ bad_definitions.append((i, type(definition)))
if bad_definitions:
raise DagsterInvalidDefinitionError(
'Bad return value from repository construction function: all elements of list '
diff --git a/python_modules/dagster/dagster_tests/core_tests/definitions_tests/test_repository_definition.py b/python_modules/dagster/dagster_tests/core_tests/definitions_tests/test_repository_definition.py
index <HASH>..<HASH> 100644
--- a/python_modules/dagster/dagster_tests/core_tests/definitions_tests/test_repository_definition.py
+++ b/python_modules/dagster/dagster_tests/core_tests/definitions_tests/test_repository_definition.py
@@ -133,3 +133,11 @@ def test_key_mismatch():
with pytest.raises(Exception, match='name in PipelineDefinition does not match'):
some_repo.get_pipeline('foo')
+
+
+def test_non_pipeline_in_pipelines():
+ with pytest.raises(DagsterInvalidDefinitionError, match='all elements of list must be of type'):
+
+ @repository
+ def _some_repo():
+ return ['not-a-pipeline']
|
Fix error message when repository contains non-pipeline-like objects (#<I>)
Closes #<I>
|
dagster-io_dagster
|
train
|
92c2ac8e4f05cd78a855531144b02278359695a5
|
diff --git a/lib/spaceship/certificate.rb b/lib/spaceship/certificate.rb
index <HASH>..<HASH> 100644
--- a/lib/spaceship/certificate.rb
+++ b/lib/spaceship/certificate.rb
@@ -57,6 +57,36 @@ module Spaceship
end
def factory(attrs)
+ # TODO: does this belong here?
+
+ # Example:
+ # => {"name"=>"iOS Distribution: SunApps GmbH",
+ # "certificateId"=>"XC5PH8DAAA",
+ # "serialNumber"=>"797E732CCE8B7AAA",
+ # "status"=>"Issued",
+ # "statusCode"=>0,
+ # "expirationDate"=>#<DateTime: 2015-11-25T22:45:50+00:00 ((2457352j,81950s,0n),+0s,2299161j)>,
+ # "certificatePlatform"=>"ios",
+ # "certificateType"=>
+ # {"certificateTypeDisplayId"=>"R58UK2EAAA",
+ # "name"=>"iOS Distribution",
+ # "platform"=>"ios",
+ # "permissionType"=>"distribution",
+ # "distributionType"=>"store",
+ # "distributionMethod"=>"app",
+ # "ownerType"=>"team",
+ # "daysOverlap"=>364,
+ # "maxActive"=>2}}
+
+ if attrs['certificateType']
+ # On some accounts this is nested, so we need to flatten it
+ attrs.merge!(attrs['certificateType'])
+ attrs.delete('certificateType')
+ end
+
+ puts attrs
+
+ # Here we go
klass = CERTIFICATE_TYPE_IDS[attrs['certificateTypeDisplayId']]
klass ||= Certificate
klass.new(attrs)
|
Added proper parsing of different kinds of server responses
|
fastlane_fastlane
|
train
|
0689c96a0d2a75e4ba7db356ab4495efb1ae932d
|
diff --git a/lib/payson_api/response/payment_details.rb b/lib/payson_api/response/payment_details.rb
index <HASH>..<HASH> 100644
--- a/lib/payson_api/response/payment_details.rb
+++ b/lib/payson_api/response/payment_details.rb
@@ -24,7 +24,7 @@ class PaymentDetails
case @payment_type
when 'GUARANTEE'
@guarantee_status = data['guaranteeStatus']
- @guarantee_deadline_at = Time.at(data['guaranteeDeadlineTimestamp'])
+ @guarantee_deadline_at = Time.parse(CGI.unescape(data['guaranteeDeadlineTimestamp']))
when 'INVOICE'
@invoice_status = data['invoiceStatus']
if %w[ORDERCREATED SHIPPED DONE CREDITED].include?(@invoice_status)
|
Bug fix: "guaranteeDeadlineTimestamp" arrives URL-escaped.
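A minimal sketch of the failure and the fix (the escaped value is hypothetical):
```
require 'cgi'
require 'time'

raw = '2015-01-01+12%3A00%3A00'  # hypothetical escaped timestamp
CGI.unescape(raw)                # => "2015-01-01 12:00:00"
Time.parse(CGI.unescape(raw))    # => 2015-01-01 12:00:00 in the local zone
# Time.at(raw) raises TypeError, since it expects seconds, not a String
```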
|
stoffus_payson_api
|
train
|
279b9ed6b5b5435bc4b44eec2765f55a5ab55f78
|
diff --git a/README.md b/README.md
index <HASH>..<HASH> 100644
--- a/README.md
+++ b/README.md
@@ -15,19 +15,17 @@ Not intended to be the Next Big Thing, more of a stepping stone to make your cod
- Write declarative views with ES6 template strings
- Built-in event delegation
- Lightweight and not overly opinionated
-- Components as siblings rather than parent/child with easy cross-component interop
- Easy to learn, with very few proprietary concepts to remember
- Designed to promote an easy future refactor job to migrate your JavaScript code to Elm or something Elm-like.
###Architecture
Comp borrows the model/update/view pattern and one-way data flow from the Elm Architecture and React/Flux, with a few key differences:
-- All Comp components are siblings (no nesting or parent/child relationships)
- Actions (i.e., the "update" bit) are expressed with functions rather than a switch block
- While model and state remain internal to each component, a component's Actions can be called
from external sources, e.g., other components, the console etc. This allow easy interop
between components and allows your layout to be more loosely-coupled to your logic.
-- It currently doesn't use immutable data
+- The model is not immutable
Install
diff --git a/comp.js b/comp.js
index <HASH>..<HASH> 100644
--- a/comp.js
+++ b/comp.js
@@ -8,7 +8,7 @@
*
* Issues? Please visit https://github.com/brendan-jefferis/comp/issues
*
-* Date: 2017-01-08T09:14:09.274Z
+* Date: 2017-01-14T06:13:07.901Z
*/
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :
@@ -123,6 +123,7 @@ function registerEventDelegator(component) {
}
function delegateEvent(e, component, componentHtmlTarget) {
+ e.stopPropagation();
var target = getEventTarget(e);
var action = getEventActionFromElement(e, target, componentHtmlTarget);
if (action.name === "") {
diff --git a/comp.min.js b/comp.min.js
index <HASH>..<HASH> 100644
--- a/comp.min.js
+++ b/comp.min.js
@@ -8,7 +8,7 @@
*
* Issues? Please visit https://github.com/brendan-jefferis/comp/issues
*
-* Date: 2017-01-08T09:14:09.274Z
+* Date: 2017-01-14T06:13:07.901Z
*/
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :
@@ -123,6 +123,7 @@ function registerEventDelegator(component) {
}
function delegateEvent(e, component, componentHtmlTarget) {
+ e.stopPropagation();
var target = getEventTarget(e);
var action = getEventActionFromElement(e, target, componentHtmlTarget);
if (action.name === "") {
diff --git a/src/lib/comp-events.js b/src/lib/comp-events.js
index <HASH>..<HASH> 100644
--- a/src/lib/comp-events.js
+++ b/src/lib/comp-events.js
@@ -18,6 +18,7 @@ export function registerEventDelegator(component) {
}
export function delegateEvent(e, component, componentHtmlTarget) {
+ e.stopPropagation();
const target = getEventTarget(e);
const action = getEventActionFromElement(e, target, componentHtmlTarget);
if (action.name === "") {
|
Prevent delegated events of nested components from propagating to the parent component
|
brendan-jefferis_comp
|
train
|
ceeaf8967543e0da66abd116136570665b5bc320
|
diff --git a/tasks/typescript.js b/tasks/typescript.js
index <HASH>..<HASH> 100644
--- a/tasks/typescript.js
+++ b/tasks/typescript.js
@@ -20,12 +20,11 @@ module.exports = function (grunt) {
resolvePath:path.resolve,
readFile:function (file){
- var content = grunt.file.read(file);
+ var content = fs.readFileSync(file, 'utf8');
// strip UTF BOM
if(content.charCodeAt(0) === 0xFEFF){
content = content.slice(1);
}
-
return content;
},
dirName:path.dirname,
|
fix task failure when typescript looks for files that may be missing
typescript seems to look for files by trying to read them; this causes
problems because if grunt.file.read fails, it fails the whole task.
|
k-maru_grunt-typescript
|
train
|
4fe11342d049f708c3416e5b29fa3ed5bfc3b393
|
diff --git a/js/bootstrap-typeahead.js b/js/bootstrap-typeahead.js
index <HASH>..<HASH> 100644
--- a/js/bootstrap-typeahead.js
+++ b/js/bootstrap-typeahead.js
@@ -35,7 +35,8 @@
constructor: Typeahead
, matcher: function (item, query) {
- return ~item.indexOf(query)
+ // ;_; http://jsperf.com/asdfdfasdfa
+ return ~item.toLowerCase().indexOf(query.toLowerCase())
}
, select: function () {
|
make the typeahead matcher case-insensitive
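A quick check of the matcher semantics before and after (illustrative strings):
```
// before: a capitalized item never matches a lowercase query
~"Alabama".indexOf("ala")                              // ~(-1) === 0, falsy
// after: both sides are lowercased first
~"Alabama".toLowerCase().indexOf("ala".toLowerCase())  // ~0 === -1, truthy
```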
|
twbs_bootstrap
|
train
|
9009466481335d7967b8b62c8fcd6ddf169a50bb
|
diff --git a/src/japronto/app/__init__.py b/src/japronto/app/__init__.py
index <HASH>..<HASH> 100644
--- a/src/japronto/app/__init__.py
+++ b/src/japronto/app/__init__.py
@@ -21,10 +21,7 @@ class Application:
self._connections = set()
self._reaper_settings = reaper_settings or {}
self._error_handlers = []
- if log_request is not None:
- self._log_request = log_request
- else:
- self._log_request = debug
+ self._log_request = log_request
self._request_extensions = {}
self._protocol_factory = protocol_factory or Protocol
self._debug = debug
@@ -187,7 +184,12 @@ class Application:
# break reference and cleanup matcher buffer
del self._matcher
- def _run(self, *, host, port, worker_num=None, reloader_pid=None):
+ def _run(self, *, host, port, worker_num=None, reloader_pid=None,
+ debug=None):
+ self._debug = debug or self._debug
+ if self._debug and not self._log_request:
+ self._log_request = self._debug
+
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind((host, port))
@@ -229,7 +231,8 @@ class Application:
if worker.exitcode != 0:
print('Worker excited with code {}!'.format(worker.exitcode))
- def run(self, host='0.0.0.0', port=8080, *, worker_num=None, reload=False):
+ def run(self, host='0.0.0.0', port=8080, *, worker_num=None, reload=False,
+ debug=False):
if os.environ.get('_JAPR_IGNORE_RUN'):
return
@@ -243,4 +246,4 @@ class Application:
self._run(
host=host, port=port, worker_num=worker_num,
- reloader_pid=reloader_pid)
+ reloader_pid=reloader_pid, debug=debug)
|
allow overriding debug in run
|
squeaky-pl_japronto
|
train
|
2e4856c16ef138aa67fbb85e8719d6fa71bacca8
|
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -25,7 +25,7 @@ function fastifyCors (fastify, opts, next) {
} else {
if (opts.hideOptionsRoute !== undefined) hideOptionsRoute = opts.hideOptionsRoute
const corsOptions = Object.assign({}, defaultOptions, opts)
- fastify.addHook('onRequest', (req, reply, next) => {
+ fastify.addHook('onRequest', function onRequestCors (req, reply, next) {
onRequest(fastify, corsOptions, req, reply, next)
})
}
@@ -50,7 +50,7 @@ function fastifyCors (fastify, opts, next) {
}
function handleCorsOptionsDelegator (optionsResolver, fastify) {
- fastify.addHook('onRequest', (req, reply, next) => {
+ fastify.addHook('onRequest', function onRequestCors (req, reply, next) {
if (optionsResolver.length === 2) {
handleCorsOptionsCallbackDelegator(optionsResolver, fastify, req, reply, next)
return
|
chore: rename hook functions (#<I>)
|
fastify_fastify-cors
|
train
|
505722af119e8f8390ed8abc066a2f0ca573f475
|
diff --git a/tests/auth/test_backends.py b/tests/auth/test_backends.py
index <HASH>..<HASH> 100644
--- a/tests/auth/test_backends.py
+++ b/tests/auth/test_backends.py
@@ -1,7 +1,7 @@
import pytest
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist
-from taskcluster.sync import Auth
+from taskcluster import Auth
from treeherder.auth.backends import (NoEmailException,
TaskclusterAuthBackend)
diff --git a/tests/webapp/api/test_auth.py b/tests/webapp/api/test_auth.py
index <HASH>..<HASH> 100644
--- a/tests/webapp/api/test_auth.py
+++ b/tests/webapp/api/test_auth.py
@@ -6,7 +6,7 @@ from rest_framework import status
from rest_framework.decorators import APIView
from rest_framework.response import Response
from rest_framework.test import APIRequestFactory
-from taskcluster.sync import Auth
+from taskcluster import Auth
from treeherder.model.models import User
from treeherder.webapp.api import permissions
diff --git a/treeherder/auth/backends.py b/treeherder/auth/backends.py
index <HASH>..<HASH> 100644
--- a/treeherder/auth/backends.py
+++ b/treeherder/auth/backends.py
@@ -4,7 +4,7 @@ import re
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist
from rest_framework.reverse import reverse
-from taskcluster.sync import Auth
+from taskcluster import Auth
from taskcluster.utils import scope_match
logger = logging.getLogger(__name__)
@@ -82,8 +82,8 @@ class TaskclusterAuthBackend(object):
return None
tc_auth = Auth()
- # see: https://docs.taskcluster.net/reference/platform/auth/api-docs#authenticateHawk
- # see: https://github.com/taskcluster/taskcluster-client.py/blob/master/README.md#authenticate-hawk-request
+ # https://docs.taskcluster.net/reference/platform/taskcluster-auth/references/api#authenticateHawk
+ # https://github.com/taskcluster/taskcluster-client.py#authenticate-hawk-request
result = tc_auth.authenticateHawk({
"authorization": auth_header,
"host": host,
|
Import Auth from taskcluster rather than taskcluster.sync
Since the latter no longer exists in taskcluster 1.x, whereas the former
works in all versions.
|
mozilla_treeherder
|
train
|
a081d6b4adb314691c1a90f6e389e23e5e0d05df
|
diff --git a/googleanalytics/columns.py b/googleanalytics/columns.py
index <HASH>..<HASH> 100644
--- a/googleanalytics/columns.py
+++ b/googleanalytics/columns.py
@@ -23,6 +23,12 @@ DIMENSIONS = {
'ga:dateHour': lambda date: utils.date.parse('{} {}'.format(date[:8], date[8:])),
}
+PYSLUG_OVERRIDES = {
+ '1dayUsers': 'active_1day_users',
+ '7dayUsers': 'active_7day_users',
+ '14dayUsers': 'active_14day_users',
+ '30dayUsers': 'active_30day_users',
+}
def escape_chars(value, chars=',;'):
if value is True:
@@ -44,6 +50,13 @@ def escape(method):
return escaped_method
+def pyslug(name):
+ """
+ Make name safe to use as an attribute name (Python identifier).
+ """
+ return PYSLUG_OVERRIDES.get(name) or re.sub(r'([A-Z])', r'_\1', name).lower()
+
+
class Column(object):
selectors = (
'eq',
@@ -85,7 +98,7 @@ class Column(object):
self.index = int(index.group(0))
else:
self.index = None
- self.pyslug = re.sub(r'([A-Z])', r'_\1', self.slug).lower()
+ self.pyslug = pyslug(self.slug)
self.attributes = attributes
self.name = attributes.get('uiName', column_id).replace('XX', str(self.index))
self.group = attributes.get('group')
@@ -191,7 +204,7 @@ class Segment(Column):
self.raw = raw
self.id = raw['segmentId']
self.report_type, self.slug = self.id.split('::')
- self.pyslug = re.sub(r'([A-Z])', r'_\1', self.slug).lower()
+ self.pyslug = pyslug(self.slug)
self.name = raw['name']
self.kind = raw['kind'].lower()
self.definition = raw['definition']
|
Override column names for active user metrics
Names for active user metrics start with a digit (e.g. '1dayUsers'),
so they are not valid Python identifiers.
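A standalone sketch of the resulting behaviour:
```
import re

PYSLUG_OVERRIDES = {'1dayUsers': 'active_1day_users'}

def pyslug(name):
    return PYSLUG_OVERRIDES.get(name) or re.sub(r'([A-Z])', r'_\1', name).lower()

pyslug('pageViews')  # => 'page_views', a valid identifier
pyslug('1dayUsers')  # => 'active_1day_users' rather than the invalid '1day_users'
```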
|
debrouwere_google-analytics
|
train
|
e09099e639255f48a41c4c89e2d38eceacd41589
|
diff --git a/application/briefkasten/tests/test_submission.py b/application/briefkasten/tests/test_submission.py
index <HASH>..<HASH> 100644
--- a/application/briefkasten/tests/test_submission.py
+++ b/application/briefkasten/tests/test_submission.py
@@ -43,14 +43,6 @@ def test_submission_with_one_attachment_post(testing, dropbox_container, form):
index=0)
form['message'] = 'Hello there'
form.submit()
- fs_dropbox = join(dropbox_container.fs_path, listdir(dropbox_container.fs_path)[0])
- assert len(listdir(join(fs_dropbox, 'attach'))) == 1
- fs_attachments = join(
- dropbox_container.fs_path,
- listdir(dropbox_container.fs_path)[0], 'attach')
- fs_attachment = join(fs_attachments, listdir(fs_attachments)[0])
- assert open(fs_attachment).read().decode('utf-8') == \
- open(fs_attachment, 'r').read().decode('utf-8')
def test_upload_attachment_directly(testing, dropbox_container, browser, upload_url, submit_url):
@@ -86,20 +78,3 @@ def test_submission_with_multiple_attachments(dropbox_container, form):
index=0)
form['message'] = 'Hello there'
form.submit()
- fs_attachments = join(
- dropbox_container.fs_path,
- listdir(dropbox_container.fs_path)[0], 'attach')
- assert len(listdir(fs_attachments)) == 1
-
-
-def test_submission_generates_message_to_editors(dropbox_container, browser, submit_url):
- browser.post(submit_url, params=dict(message=u'Hello'))
- fs_message = join(
- dropbox_container.fs_path,
- listdir(dropbox_container.fs_path)[0], 'message')
- created_message = open(fs_message).read().decode('utf-8')
- assert u'Hello' in created_message
- editor_token = open(join(
- dropbox_container.fs_path,
- listdir(dropbox_container.fs_path)[0], 'editor_token')).read().decode('utf-8')
- assert editor_token in created_message
|
remove nonsensical assertions
since the dropbox (finally) cleans up after itself and thus tries hard
to avoid leaving any submitted material behind, these assertions don't
make any sense anymore.
at this point we can only assert that the form submission works w/o
erroring out.
once the processing has been refactored, we can test its individual stages
|
ZeitOnline_briefkasten
|
train
|
bcc07e9e5252615d4ee9a514ae64db6c4ea9cd36
|
diff --git a/django_pandas/managers.py b/django_pandas/managers.py
index <HASH>..<HASH> 100644
--- a/django_pandas/managers.py
+++ b/django_pandas/managers.py
@@ -83,9 +83,9 @@ class DataFrameQuerySet(QuerySet):
rs_kwargs = {}
if storage == 'wide':
- df = self.to_dataframe(*fields, index=index)
+ df = self.to_dataframe(fields, index=index)
else:
- df = self.to_dataframe(*fields)
+ df = self.to_dataframe(fields)
if values is None:
raise AssertionError('You must specify a values field')
|
Fixes an issue in to_timeseries: fields were splatted into to_dataframe instead of being passed as a single list argument.
|
chrisdev_django-pandas
|
train
|
64cd11efc2e7eefc12b3ae37c12a5620c2d01216
|
diff --git a/spec/raven/processors/sanitizedata_processor_spec.rb b/spec/raven/processors/sanitizedata_processor_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/raven/processors/sanitizedata_processor_spec.rb
+++ b/spec/raven/processors/sanitizedata_processor_spec.rb
@@ -108,18 +108,58 @@ describe Raven::Processor::SanitizeData do
expect(result["array"][0]['password']).to eq(Raven::Processor::SanitizeData::STRING_MASK)
end
- it 'sanitizes query strings' do
- data = {
- 'sentry.interfaces.Http' => {
- 'data' => {
- 'query_string' => 'foo=bar&password=secret'
+ context "query strings" do
+ it 'sanitizes' do
+ data = {
+ 'sentry.interfaces.Http' => {
+ 'data' => {
+ 'query_string' => 'foo=bar&password=secret'
+ }
}
}
- }
- result = @processor.process(data)
+ result = @processor.process(data)
- vars = result["sentry.interfaces.Http"]["data"]
- expect(vars["query_string"]).to_not include("secret")
+ vars = result["sentry.interfaces.Http"]["data"]
+ expect(vars["query_string"]).to_not include("secret")
+ end
+
+ it 'handles multiple values for a key' do
+ data = {
+ 'sentry.interfaces.Http' => {
+ 'data' => {
+ 'query_string' => 'foo=bar&foo=fubar&foo=barfoo'
+ }
+ }
+ }
+
+ result = @processor.process(data)
+
+ vars = result["sentry.interfaces.Http"]["data"]
+ query_string = vars["query_string"].split('&')
+ expect(query_string).to include("foo=bar")
+ expect(query_string).to include("foo=fubar")
+ expect(query_string).to include("foo=barfoo")
+ end
+
+ it 'handles url encoded keys and values' do
+ encoded_query_string = 'Bio+4%24=cA%24%7C-%7C+M%28%29n3%5E'
+ data = {
+ 'sentry.interfaces.Http' => {
+ 'data' => {
+ 'query_string' => encoded_query_string
+ }
+ }
+ }
+
+ result = @processor.process(data)
+
+ vars = result["sentry.interfaces.Http"]["data"]
+ expect(vars["query_string"]).to eq(encoded_query_string)
+ end
+
+ it 'handles url encoded values' do
+
+ end
end
end
|
Adding tests to account for multiple form values per key and for URL-encoded data.
|
getsentry_raven-ruby
|
train
|
a1c460bb6cea3c46aa071b5761327d2e5fa1f0e4
|
diff --git a/vuepress/.vuepress/config.js b/vuepress/.vuepress/config.js
index <HASH>..<HASH> 100644
--- a/vuepress/.vuepress/config.js
+++ b/vuepress/.vuepress/config.js
@@ -94,6 +94,15 @@ module.exports = {
link: 'https://marketplace.visualstudio.com/items?itemName=lokalise.i18n-ally'
}
]
+ },
+ {
+ text: '3rd Party Integrations',
+ items: [
+ {
+ text: 'Localazy',
+ link: 'https://localazy.com/blog/how-to-localize-vuejs-app-with-vue-i18n-and-localazy?utm_source=kazupon&utm_medium=banner&utm_campaign=sponsorships_kazupon&utm_content=logo'
+ }
+ ]
}
]
},
|
docs: add 3rd party integrations
|
kazupon_vue-i18n
|
train
|
b1b7502b602bcb1391c8a55ae60d2440cee420d0
|
diff --git a/classes/PodsAdmin.php b/classes/PodsAdmin.php
index <HASH>..<HASH> 100644
--- a/classes/PodsAdmin.php
+++ b/classes/PodsAdmin.php
@@ -2331,7 +2331,7 @@ class PodsAdmin {
$capabilities[] = 'delete_' . $capability_type;
if ( 1 == pods_var( 'capability_type_extra', $pod[ 'options' ], 1 ) ) {
- $capability_type_plural = $capability_type . 's';
+ $capability_type_plural = $capability_type . 's';
$capabilities[] = 'read_private_' . $capability_type_plural;
$capabilities[] = 'edit_' . $capability_type_plural;
@@ -2350,14 +2350,14 @@ class PodsAdmin {
if ( 'custom' == pods_var( 'capability_type', $pod[ 'options' ], 'terms' ) ) {
$capability_type = pods_var( 'capability_type_custom', $pod[ 'options' ], pods_var_raw( 'name', $pod ) . 's' );
- $capability_type .= '_term';
- $capability_type_plural = $capability_type . 's';
+ $capability_type .= '_term';
+ $capability_type_plural = $capability_type . 's';
- // Singular
- $capabilities[] = 'edit_' . $capability_type;
- $capabilities[] = 'delete_' . $capability_type;
- $capabilities[] = 'assign_' . $capability_type;
- // Plural
+ // Singular
+ $capabilities[] = 'edit_' . $capability_type;
+ $capabilities[] = 'delete_' . $capability_type;
+ $capabilities[] = 'assign_' . $capability_type;
+ // Plural
$capabilities[] = 'manage_' . $capability_type_plural;
$capabilities[] = 'edit_' . $capability_type_plural;
$capabilities[] = 'delete_' . $capability_type_plural;
|
Match indentation to surrounding code (spaced)
|
pods-framework_pods
|
train
|
ddcebeed9bc4aebdebf7222d2b9822381c213965
|
diff --git a/lib/hexdump/hexdump.rb b/lib/hexdump/hexdump.rb
index <HASH>..<HASH> 100644
--- a/lib/hexdump/hexdump.rb
+++ b/lib/hexdump/hexdump.rb
@@ -173,8 +173,9 @@ module Hexdump
# @return [nil]
#
# @raise [ArgumentError]
- # The given data does not define the `#each_byte` method, or
- # the `:output` value does not support the `#<<` method.
+ # The given data does not define the `#each_byte` method,
+ # the `:output` value does not support the `#<<` method or
+ # the `:base` value was unknown.
#
def Hexdump.dump(data,options={})
output = options.fetch(:output,STDOUT)
@@ -192,8 +193,10 @@ module Hexdump
:octal
when :binary, :bin, 2
:binary
- else
+ when nil
:hexadecimal
+ else
+ raise(ArgumentError,"unknown base #{options[:base].inspect}")
end
word_size = options.fetch(:word_size,1)
diff --git a/spec/hexdump_spec.rb b/spec/hexdump_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/hexdump_spec.rb
+++ b/spec/hexdump_spec.rb
@@ -47,6 +47,12 @@ describe Hexdump do
}.should raise_error(ArgumentError)
end
+ it "should only accept known :base values" do
+ lambda {
+ subject.dump(data, :base => :foo)
+ }.should raise_error(ArgumentError)
+ end
+
it "should append each line of the hexdump to the output" do
lines = []
subject.dump(data, :output => lines)
|
Raise an ArgumentError if :base is not recognized.
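A hedged usage sketch of the new validation:
```
require 'hexdump'

Hexdump.dump("hello", :base => :binary)  # recognized base, dumps as before
Hexdump.dump("hello", :base => :foo)     # now raises ArgumentError ("unknown base :foo")
```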
|
postmodern_hexdump
|
train
|
e5498eb18f717fc389b05cd3927c8f22c13c0bfe
|
diff --git a/doc/conf.py b/doc/conf.py
index <HASH>..<HASH> 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -40,6 +40,9 @@ templates_path = ['_templates']
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
+# Tell RTD to use index.rst, not contents.rst
+# https://github.com/readthedocs/readthedocs.org/issues/2569
+master_doc = 'index'
# -- Options for HTML output -------------------------------------------------
|
Set master_doc for Sphinx
|
pypa_pep517
|
train
|
e1dec6061db4c8483e5def4fbb60ee79366c422b
|
diff --git a/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/data/mongo/MongoRepositoriesAutoConfiguration.java b/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/data/mongo/MongoRepositoriesAutoConfiguration.java
index <HASH>..<HASH> 100644
--- a/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/data/mongo/MongoRepositoriesAutoConfiguration.java
+++ b/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/data/mongo/MongoRepositoriesAutoConfiguration.java
@@ -21,7 +21,7 @@ import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
-import org.springframework.boot.autoconfigure.mongo.MongoAutoConfiguration;
+import org.springframework.boot.autoconfigure.mongo.MongoDataAutoConfiguration;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.data.mongodb.repository.MongoRepository;
@@ -58,7 +58,7 @@ import com.mongodb.Mongo;
MongoRepositoryConfigurationExtension.class })
@ConditionalOnProperty(prefix = "spring.data.mongodb.repositories", name = "enabled", havingValue = "true", matchIfMissing = true)
@Import(MongoRepositoriesAutoConfigureRegistrar.class)
-@AutoConfigureAfter(MongoAutoConfiguration.class)
+@AutoConfigureAfter(MongoDataAutoConfiguration.class)
public class MongoRepositoriesAutoConfiguration {
}
diff --git a/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/data/mongo/MongoRepositoriesAutoConfigurationTests.java b/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/data/mongo/MongoRepositoriesAutoConfigurationTests.java
index <HASH>..<HASH> 100644
--- a/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/data/mongo/MongoRepositoriesAutoConfigurationTests.java
+++ b/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/data/mongo/MongoRepositoriesAutoConfigurationTests.java
@@ -16,6 +16,8 @@
package org.springframework.boot.autoconfigure.data.mongo;
+import java.util.Set;
+
import org.junit.After;
import org.junit.Test;
import org.springframework.beans.factory.NoSuchBeanDefinitionException;
@@ -29,11 +31,14 @@ import org.springframework.boot.autoconfigure.mongo.MongoAutoConfiguration;
import org.springframework.boot.autoconfigure.mongo.MongoDataAutoConfiguration;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.Configuration;
+import org.springframework.data.mongodb.core.mapping.MongoMappingContext;
import org.springframework.data.mongodb.repository.config.EnableMongoRepositories;
+import org.springframework.test.util.ReflectionTestUtils;
import com.mongodb.Mongo;
import com.mongodb.MongoClient;
+import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertNotNull;
@@ -61,6 +66,12 @@ public class MongoRepositoriesAutoConfigurationTests {
assertNotNull(this.context.getBean(CityRepository.class));
Mongo mongo = this.context.getBean(Mongo.class);
assertThat(mongo, is(instanceOf(MongoClient.class)));
+ MongoMappingContext mappingContext = this.context
+ .getBean(MongoMappingContext.class);
+ @SuppressWarnings("unchecked")
+ Set<? extends Class<?>> entities = (Set<? extends Class<?>>) ReflectionTestUtils
+ .getField(mappingContext, "initialEntitySet");
+ assertThat(entities.size(), is(equalTo(1)));
}
@Test
|
Make MongoRepositoriesAutoConfiguration order more explicit
It has to come after MongoDataAutoConfiguration (not
MongoAutoConfiguration) because it looks for beans that are defined
there (e.g. MongoMappingContext which is the one that scans for entities).
Fixes gh-<I>
|
spring-projects_spring-boot
|
train
|
ab1fc35e8ac2a8861d532ec073a19e781791937a
|
diff --git a/lib/perpetuity/mapper.rb b/lib/perpetuity/mapper.rb
index <HASH>..<HASH> 100644
--- a/lib/perpetuity/mapper.rb
+++ b/lib/perpetuity/mapper.rb
@@ -104,7 +104,7 @@ module Perpetuity
end
def self.first
- retrieve.limit(1).first
+ data_source.first mapped_class
end
def self.all
diff --git a/lib/perpetuity/mongodb.rb b/lib/perpetuity/mongodb.rb
index <HASH>..<HASH> 100644
--- a/lib/perpetuity/mongodb.rb
+++ b/lib/perpetuity/mongodb.rb
@@ -37,13 +37,21 @@ module Perpetuity
end
def count klass
- database.collection(klass.to_s).count()
+ database.collection(klass.to_s).count
end
def delete_all klass
database.drop_collection klass.to_s
end
+ def first klass
+ data = database.collection(klass.to_s).find_one
+ object = klass.allocate
+ inject_data object, data
+
+ object
+ end
+
def retrieve klass, criteria, options = {}
objects = []
@@ -60,20 +68,33 @@ module Perpetuity
database.collection(klass.to_s).find(criteria, other_options).sort(sort_criteria).each do |document|
object = klass.allocate
- document.each_pair do |attribute,value|
- if object.respond_to?("#{attribute}=")
- object.send("#{attribute}=", value)
- else
- attribute = "@#{attribute}" unless attribute[0] == '@'
- object.instance_variable_set(attribute, value)
- end
- end
+ inject_data object, document
objects << object
end
objects
end
+ def inject_data object, data
+ data.each_pair do |attribute,value|
+ if object.respond_to?("#{attribute}=")
+ object.send("#{attribute}=", value)
+ else
+ attribute = "@#{attribute}" unless attribute[0] == '@'
+ object.instance_variable_set(attribute, value)
+ end
+ inject_id object, value
+ end
+ end
+
+ def inject_id object, id
+ object.instance_eval do
+ def id
+ @_id
+ end
+ end
+ end
+
def all klass
retrieve klass, {}, {}
end
diff --git a/lib/perpetuity/retrieval.rb b/lib/perpetuity/retrieval.rb
index <HASH>..<HASH> 100644
--- a/lib/perpetuity/retrieval.rb
+++ b/lib/perpetuity/retrieval.rb
@@ -30,13 +30,6 @@ module Perpetuity
def to_a
results = @data_source.retrieve(@class, @criteria, { attribute: sort_attribute, direction: sort_direction, limit: result_limit })
- results.each do |result|
- result.instance_eval do
- def id
- @_id
- end
- end
- end
results
end
|
Move ID injection out of retrieval class
This also uses the `find_one` method on the `mongo` gem in the mongodb
driver, rather than using the `find` method, which should provide a
small performance boost by not instantiating and dereferencing a
MongoDB cursor.
|
jgaskins_perpetuity
|
train
|
1d33c03537b20308fdef2efc15e592c212002d16
|
diff --git a/lib/register/enter_implementation.rb b/lib/register/enter_implementation.rb
index <HASH>..<HASH> 100644
--- a/lib/register/enter_implementation.rb
+++ b/lib/register/enter_implementation.rb
@@ -6,11 +6,9 @@ module Register
next unless code.is_a? Virtual::MethodEnter
# save return register and create a new frame
to = RegisterReference.new(:r0) # message base
- tmp = RegisterReference.new(:r5) # tmp
pc = RegisterReference.new(:pc)
- move1 = RegisterMachine.instance.ldr( tmp , pc )
- move2 = RegisterMachine.instance.ldr( tmp , to , 3 ) #TODO 3 == return reg, needs constant / layout
- block.replace(code , [move1,move2] )
+ move1 = RegisterMachine.instance.str( pc , to , Virtual::Message::RETURN )
+ block.replace(code , [move1] )
end
end
end
diff --git a/lib/register/return_implementation.rb b/lib/register/return_implementation.rb
index <HASH>..<HASH> 100644
--- a/lib/register/return_implementation.rb
+++ b/lib/register/return_implementation.rb
@@ -3,12 +3,11 @@ module Register
def run block
block.codes.dup.each do |code|
next unless code.is_a? Virtual::MethodReturn
- to = RegisterReference.new(:r0)
- tmp = RegisterReference.new(:r5)
+ #load the return address into pc, affecting return. (other cpus have commands for this, but not arm)
+ message = RegisterReference.new(:r0)
pc = RegisterReference.new(:pc)
- move1 = RegisterMachine.instance.ldr( to , tmp , 3 ) #TODO 3 == return reg, needs constant / layout
- move2 = RegisterMachine.instance.ldr( pc , tmp )
- block.replace(code , [move1,move2] )
+ move1 = RegisterMachine.instance.ldr( pc ,message , Virtual::Message::RETURN )
+ block.replace(code , [move1] )
end
end
end
diff --git a/lib/virtual/message.rb b/lib/virtual/message.rb
index <HASH>..<HASH> 100644
--- a/lib/virtual/message.rb
+++ b/lib/virtual/message.rb
@@ -20,6 +20,10 @@ module Virtual
# During compilation Message and frame objects are created to do type analysis
class Message
+ RETURN = 2
+ EXCEPTION = 3
+ SELF = 4
+
def initialize me , normal , exceptional
@me = me
@next_normal = normal
|
corrected the method enter and return implementations and created the missing Message constants
|
ruby-x_rubyx
|
train
|
93fc1e639991b7727b65f3511de86ec07e066659
|
diff --git a/OpenPNM/Fluids/models/density.py b/OpenPNM/Fluids/models/density.py
index <HASH>..<HASH> 100644
--- a/OpenPNM/Fluids/models/density.py
+++ b/OpenPNM/Fluids/models/density.py
@@ -6,13 +6,99 @@ Submodule -- density
"""
import scipy as _sp
-def IABWS(fluid,coefficients,
- **kwargs):
+def IdealGas(fluid,P,T,MW,**kwargs):
r"""
- Uses ideal gas equation to estimate molar density of a pure gas
+ Uses ideal gas equation of state to calculate the density of an ideal gas
+
+ INPUTS
+ ----------
+ P, T, MW: float, array_like
+
+ P pressure of the gas in [Pa]
+ T temperature of the gas in [K]
+ MW molecular weight of the gas in [kg/kmole]
+
+ OUTPUT
+ ----------
+ value: float, array_like
+ value is the density of the gas in [kg/m3]
+ """
+ P = fluid['pore.Pressure']
+ T = fluid['pore.Temperature']
+ MW = fluid['pore.MolecularWeight']
+ Rbar = 8314.47
+ R = Rbar/MW
+ value = P/(R*T)
+ return value
+def RealGas(fluid,P,T,Pc,Tc,MW,**kwargs):
+ r"""
+ Uses Van der Waals equation of state to calculate the density of a real gas
+
+ INPUTS
+ ----------
+ P, T, Pc, Tc, MW: float, array_like
+
+ P pressure of the gas in [Pa]
+ T temperature of the gas in [K]
+ Pc critical pressure of the gas in [Pa]
+    Tc     critical temperature of the gas in [K]
+ MW molecular weight of the gas in [kg/kmole]
+
+ OUTPUT
+ ----------
+ value: float, array_like
+ value is the density of the gas in [kg/m3]
"""
- T = fluid['pore.temperature']
- P = fluid['pore.pressure']
- value = 'some_function'
+ P = fluid['pore.Pressure']
+ T = fluid['pore.Temperature']
+ Pc = fluid['pore.CriticalPressure']
+ Tc = fluid['pore.CriticalTemperature']
+ MW = fluid['pore.MolecularWeight']
+ Rbar = 8314.47
+ R = Rbar/MW
+ a = 27*(R**2)*(Tc**2)/(64*Pc); b = R*Tc/(8*Pc)
+ a0 = 1; a1 = -1/b; a2 = (R*T+b*P)/(a*b); a3 = -P/(a*b)
+    density = _sp.roots([a0, a1, a2, a3])
+    value = _sp.real(density[2])
return value
+
+
+def Water(fluid,T='pore.temperature',S='pore.salinity',**kwargs):
+ r"""
+ Calculates density of pure water or seawater at atmospheric pressure
+    using Eq. (8) given by Sharqawy et al. [1]_. Values at temperatures higher
+    than the normal boiling temperature are calculated at the saturation pressure.
+
+ Parameters
+ ----------
+ T, S: strings
+ Property names where fluid temperature and salinity are located. T
+ must be in K, and S in g of salt per kg of fluid, or ppt (parts per
+ thousand)
+
+ Returns
+ -------
+ rho, the density in [kg/m3]
+
+ Notes
+ -----
+ VALIDITY: 273 < T < 453 K; 0 < S < 160 g/kg;
+ ACCURACY: 0.1 %
+
+ References
+ ----------
+ [1] Sharqawy M. H., Lienhard J. H., and Zubair, S. M., Desalination and Water Treatment, 2010.
+
+ """
+ T = fluid[T]
+ try:
+ S = fluid[S]
+ except:
+ S = 0
+ a1=9.9992293295E+02; a2=2.0341179217E-02; a3=-6.1624591598E-03; a4=2.2614664708E-05; a5=-4.6570659168E-08
+ b1=8.0200240891E-01; b2=-2.0005183488E-03; b3=1.6771024982E-05; b4=-3.0600536746E-08; b5=-1.6132224742E-11
+ rho_w = a1 + a2*T + a3*T**2 + a4*T**3 + a5*T**4;
+ D_rho = b1*S + b2*S*T + b3*S*(T**2) + b4*S*(T**3) + b5*(S**2)*(T**2);
+ value = rho_w + D_rho
+ return value
\ No newline at end of file
|
added density models
Former-commit-id: cc<I>f<I>b1a<I>cfe<I>c<I>cb<I>d1c<I>e5d
Former-commit-id: e<I>ccb5e7b6bd<I>b7a<I>bed<I>e9e<I>bdfc
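
As a quick numeric check of the ideal-gas model above (rho = P/(R*T) with R = 8314.47/MW), a standalone example using assumed values for air:

# Worked example: ideal-gas density of air at roughly ambient conditions.
P = 101325.0     # Pa
T = 298.15       # K
MW = 28.97       # kg/kmol (approximate molecular weight of air)
Rbar = 8314.47   # J/(kmol*K)
rho = P / ((Rbar / MW) * T)
print(round(rho, 3))  # ~1.184 kg/m3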
|
PMEAL_OpenPNM
|
train
|
0f35e95f8887ebbb68ed083f3c9e1ad288237e89
|
diff --git a/js/bitso.js b/js/bitso.js
index <HASH>..<HASH> 100644
--- a/js/bitso.js
+++ b/js/bitso.js
@@ -327,18 +327,18 @@ module.exports = class bitso extends Exchange {
parseTicker (ticker, market = undefined) {
//
- // {
- // "high":"37446.85",
- // "last":"36599.54",
- // "created_at":"2022-01-28T12:06:11+00:00",
- // "book":"btc_usdt",
- // "volume":"7.29075419",
- // "vwap":"36579.1564400307",
- // "low":"35578.52",
- // "ask":"36574.76",
- // "bid":"36538.22",
- // "change_24":"-105.64"
- // }
+ // {
+ // "high":"37446.85",
+ // "last":"36599.54",
+ // "created_at":"2022-01-28T12:06:11+00:00",
+ // "book":"btc_usdt",
+ // "volume":"7.29075419",
+ // "vwap":"36579.1564400307",
+ // "low":"35578.52",
+ // "ask":"36574.76",
+ // "bid":"36538.22",
+ // "change_24":"-105.64"
+ // }
//
const symbol = this.safeSymbol (undefined, market);
const timestamp = this.parse8601 (this.safeString (ticker, 'created_at'));
@@ -379,18 +379,21 @@ module.exports = class bitso extends Exchange {
const response = await this.publicGetTicker (this.extend (request, params));
const ticker = this.safeValue (response, 'payload');
//
- // {
- // "high":"37446.85",
- // "last":"36599.54",
- // "created_at":"2022-01-28T12:06:11+00:00",
- // "book":"btc_usdt",
- // "volume":"7.29075419",
- // "vwap":"36579.1564400307",
- // "low":"35578.52",
- // "ask":"36574.76",
- // "bid":"36538.22",
- // "change_24":"-105.64"
- // }
+ // {
+ // "success":true,
+ // "payload":{
+ // "high":"37446.85",
+ // "last":"37051.96",
+ // "created_at":"2022-01-28T17:03:29+00:00",
+ // "book":"btc_usdt",
+ // "volume":"6.16176186",
+ // "vwap":"36582.6293169472",
+ // "low":"35578.52",
+ // "ask":"37083.62",
+ // "bid":"37039.66",
+ // "change_24":"478.45"
+ // }
+ // }
//
return this.parseTicker (ticker, market);
}
|
bitso: update response for ticker
|
ccxt_ccxt
|
train
|
ef2cc62f85b4ec67501c76e6537dba4506d3a26c
|
diff --git a/tests/ProxyManagerTest/Functional/LazyLoadingGhostFunctionalTest.php b/tests/ProxyManagerTest/Functional/LazyLoadingGhostFunctionalTest.php
index <HASH>..<HASH> 100644
--- a/tests/ProxyManagerTest/Functional/LazyLoadingGhostFunctionalTest.php
+++ b/tests/ProxyManagerTest/Functional/LazyLoadingGhostFunctionalTest.php
@@ -30,6 +30,7 @@ use ProxyManagerTestAsset\ClassWithPublicArrayProperty;
use ProxyManagerTestAsset\ClassWithPublicProperties;
use ProxyManagerTestAsset\ClassWithProtectedProperties;
use ProxyManagerTestAsset\ClassWithPrivateProperties;
+use ProxyManagerTestAsset\ClassWithSelfHint;
use ReflectionClass;
use ReflectionProperty;
@@ -368,6 +369,8 @@ class LazyLoadingGhostFunctionalTest extends PHPUnit_Framework_TestCase
*/
public function getProxyMethods()
{
+ $selfHintParam = new ClassWithSelfHint();
+
return array(
array(
'ProxyManagerTestAsset\\BaseClass',
@@ -390,6 +393,13 @@ class LazyLoadingGhostFunctionalTest extends PHPUnit_Framework_TestCase
array(),
'publicByReferenceMethodDefault'
),
+ array(
+ 'ProxyManagerTestAsset\\ClassWithSelfHint',
+ new ClassWithSelfHint(),
+ 'selfHintMethod',
+ array('parameter' => $selfHintParam),
+ $selfHintParam
+ ),
);
}
|
Verifying `self` hint usage in lazy loading value holder as of #<I>
|
Ocramius_ProxyManager
|
train
|
280fff2a414f649f62029dce448963f045380c0a
|
diff --git a/client.go b/client.go
index <HASH>..<HASH> 100644
--- a/client.go
+++ b/client.go
@@ -176,8 +176,9 @@ func (c *Client) DoBatch(workers int, reqs ...*Request) <-chan *Response {
func (c *Client) do(req *Request) (*Response, error) {
// create a response
resp := &Response{
- Request: req,
- Start: time.Now(),
+ Request: req,
+ Start: time.Now(),
+ bufferSize: req.BufferSize,
}
// default to current working directory
diff --git a/request.go b/request.go
index <HASH>..<HASH> 100644
--- a/request.go
+++ b/request.go
@@ -38,6 +38,12 @@ type Request struct {
// error returned.
Size uint64
+ // BufferSize specifies the size in bytes of the buffer that is used for
+ // transferring the requested file. Larger buffers may result in faster
+ // throughput but will use more memory and result in less frequent updates
+ // to the transfer progress statistics. Default: 4096.
+ BufferSize uint
+
// Hash specifies the hashing algorithm that will be used to compute the
// checksum value of the transferred file.
//
diff --git a/response.go b/response.go
index <HASH>..<HASH> 100644
--- a/response.go
+++ b/response.go
@@ -72,6 +72,9 @@ type Response struct {
// bytesCompleted specifies the number of bytes which were already
// transferred before this transfer began.
bytesResumed uint64
+
+	// bufferSize specifies the size in bytes of the transfer buffer.
+ bufferSize uint
}
// IsComplete indicates whether the Response transfer context has completed with
@@ -150,8 +153,14 @@ func (c *Response) copy() error {
// close writer when finished
defer c.writer.Close()
+ // set transfer buffer size
+ bufferSize := c.bufferSize
+ if bufferSize == 0 {
+ bufferSize = 4096
+ }
+
// download and update progress
- var buffer [4096]byte
+ buffer := make([]byte, bufferSize)
complete := false
for complete == false {
// read HTTP stream
|
Added configurable buffer size. Fixes #8
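
A minimal Python sketch of the same pattern (the commit itself is Go): an optional buffer size falling back to the 4096-byte default.

import io

def copy_stream(src, dst, buffer_size=0):
    # Fall back to the 4096-byte default when no size is configured.
    if buffer_size == 0:
        buffer_size = 4096
    copied = 0
    while True:
        chunk = src.read(buffer_size)
        if not chunk:
            break
        dst.write(chunk)
        copied += len(chunk)  # progress stats update once per chunk
    return copied

assert copy_stream(io.BytesIO(b"x" * 10000), io.BytesIO(), buffer_size=8192) == 10000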
|
cavaliercoder_grab
|
train
|
91747b3ef4e7a37b5e7f6826837d41193ed66a91
|
diff --git a/lib/module.js b/lib/module.js
index <HASH>..<HASH> 100644
--- a/lib/module.js
+++ b/lib/module.js
@@ -11,33 +11,39 @@
(function () {
"use strict";
-
+
// get the core
var core = require('./core/core.js');
-
+
+ function mergeObject(from, to) {
+ for (var name in from) {
+ to[name] = from[name];
+ }
+ }
+
module.exports = function (arg) {
var object;
-
+
if (typeof arg === 'object' && arg !== null) {
object = new core.Listener(arguments);
} else {
- object = new core.Requester(arguments);
+ object = new core.Requester(arguments);
}
-
+
return object;
};
    // export core so it can be easily used in layers
- module.exports.core = core;
-
+ mergeObject(core, module.exports);
+
// export a setLayer method
function setLayer(layerName, object) {
core.layers[layerName] = object;
}
module.exports.setLayer = setLayer;
-
+
// predefine TCP and IPC layers
setLayer('TCP', require('./layers/TCP.js'));
setLayer('IPC', require('./layers/IPC.js'));
-
+
})();
\ No newline at end of file
|
[fix] extend the thintalk function with core abstraction
|
AndreasMadsen_thintalk
|
train
|
2e0ebba1052af09307b93567daece9a04115bf71
|
diff --git a/externs/mux.js b/externs/mux.js
index <HASH>..<HASH> 100644
--- a/externs/mux.js
+++ b/externs/mux.js
@@ -88,3 +88,50 @@ muxjs.mp4.Transmuxer.prototype.dispose = function() {};
* @exportDoc
*/
muxjs.mp4.Transmuxer.Segment;
+
+
+/**
+ * Parser for CEA closed captions embedded in video streams for Dash.
+ * @constructor
+ * @struct
+ */
+muxjs.mp4.CaptionParser = function() {};
+
+
+/**
+ * Initializes the closed caption parser.
+ */
+muxjs.mp4.CaptionParser.prototype.init = function() {};
+
+
+/**
+ * Return true if a new video track is selected or if the timescale is
+ * changed.
+ * @param {!Array.<number>} videoTrackIds A list of video tracks found in the
+ * init segment.
+ * @param {!Object.<number, number>} timescales The map of track Ids and the
+ * tracks' timescales in the init segment.
+ * @return {boolean}
+ */
+muxjs.mp4.CaptionParser.prototype.isNewInit = function(
+ videoTrackIds, timescales) {};
+
+
+/**
+ * Parses embedded CEA closed captions and interacts with the underlying
+ * CaptionStream, and return the parsed captions.
+ * @param {!Uint8Array} segment The fmp4 segment containing embedded captions
+ * @param {!Array.<number>} videoTrackIds A list of video tracks found in the
+ * init segment.
+ * @param {!Object.<number, number>} timescales The timescales found in the
+ * init segment.
+ * @return {Object} Parsed captions
+ */
+muxjs.mp4.CaptionParser.prototype.parse = function(
+ segment, videoTrackIds, timescales) {};
+
+
+/**
+ * Clear the parsed closed captions data for new data.
+ */
+muxjs.mp4.CaptionParser.prototype.clearParsedCaptions = function() {};
|
Add CaptionParser to mux.js externs
We're going to use the CaptionParser from mux.js for parsing closed
captions for Dash. Adding it to the mux.js externs.
Change-Id: I<I>b4cfeed4abdb<I>df<I>c<I>b3e<I>ba<I>
|
google_shaka-player
|
train
|
e07a35a70179bdd82a1d4743f17286ee48180ae7
|
diff --git a/planet/cli/types.py b/planet/cli/types.py
index <HASH>..<HASH> 100644
--- a/planet/cli/types.py
+++ b/planet/cli/types.py
@@ -55,24 +55,6 @@ class CommaSeparatedFloat(click.types.StringParamType):
return ret
-class CommaSeparatedChoice(click.types.StringParamType):
- def __init__(self, *args, **kwargs):
- self.choice = click.types.Choice(*args, **kwargs)
-
- def get_metavar(self, param: "click.Parameter"):
- self.choice.get_metavar(param)
-
- def convert(self, value, param, ctx) -> List[float]:
- bad_values = CommaSeparatedString().convert(value, param, ctx)
- # import pdb; pdb.set_trace()
- if len(bad_values) > 1:
- values = [v.strip().split("'")[1] for v in bad_values]
- else:
- values = bad_values
- converted = [self.choice.convert(v, param, ctx) for v in values]
- return converted
-
-
class JSON(click.ParamType):
"""JSON specified as a string, json file filename, or stdin."""
name = 'JSON'
|
Removed the CommaSeparatedChoice class.
|
planetlabs_planet-client-python
|
train
|
2fe287a01ea1ddea166de3f23c26ee39a0c335c8
|
diff --git a/test/unit/test_generators.py b/test/unit/test_generators.py
index <HASH>..<HASH> 100644
--- a/test/unit/test_generators.py
+++ b/test/unit/test_generators.py
@@ -1,8 +1,8 @@
import porespy as ps
import numpy as np
-import scipy as sp
import pytest
import scipy.ndimage as spim
+import scipy.stats as spst
import matplotlib.pyplot as plt
plt.close('all')
@@ -144,7 +144,7 @@ class GeneratorTest():
def test_polydisperse_spheres(self):
phis = np.arange(0.1, 0.5, 0.2)
- dist = sp.stats.norm(loc=7, scale=2)
+ dist = spst.norm(loc=7, scale=2)
for phi in phis:
im = ps.generators.polydisperse_spheres(shape=[100, 100, 50],
porosity=phi, dist=dist,
@@ -223,14 +223,14 @@ class GeneratorTest():
assert len(np.unique(lt)) > 2
def test_RSA_3d_contained(self):
- im = sp.zeros([100, 100, 100], dtype=int)
+ im = np.zeros([100, 100, 100], dtype=int)
im = ps.generators.RSA(im, radius=10, volume_fraction=0.5,
mode='contained')
lt = ps.filters.local_thickness(im, sizes=[10, 9, 8, 7, 6, 5])
assert len(np.unique(lt)) == 2
def test_RSA_3d_extended(self):
- im = sp.zeros([100, 100, 100], dtype=int)
+ im = np.zeros([100, 100, 100], dtype=int)
im = ps.generators.RSA(im, radius=10, volume_fraction=0.5,
mode='extended')
im = np.pad(im, pad_width=1, mode='constant', constant_values=False)
@@ -238,7 +238,7 @@ class GeneratorTest():
assert len(np.unique(lt)) > 2
def test_RSA_2d_seqential_additions(self):
- im = sp.zeros([100, 100], dtype=int)
+ im = np.zeros([100, 100], dtype=int)
im = ps.generators.RSA(im, radius=10)
phi1 = ps.metrics.porosity(im)
im = ps.generators.RSA(im, radius=5)
diff --git a/test/unit/test_tools.py b/test/unit/test_tools.py
index <HASH>..<HASH> 100644
--- a/test/unit/test_tools.py
+++ b/test/unit/test_tools.py
@@ -140,21 +140,6 @@ class ToolsTest():
assert np.all(np.unique(im) == vals)
assert counts[1] < counts[2]
- def test_subdivide_3D(self):
- im = np.ones([50, 100, 150])
- ims = ps.tools.subdivide(im, divs=1)
- assert ims.shape == (1, 1, 1)
- assert np.all(im[tuple(ims[0, 0, 0])] == im)
- ims = ps.tools.subdivide(im, divs=2)
- assert ims.shape == (2, 2, 2)
- assert im[tuple(ims[0, 0, 0])].sum() == np.prod(im.shape)/8
-
- def test_subdivide_2D(self):
- im = np.ones([50, 100])
- ims = ps.tools.subdivide(im, divs=2)
- assert ims.shape == (2, 2)
- assert im[tuple(ims[0, 0])].sum() == np.prod(im.shape)/4
-
def test_subdivide_2D_with_scalar_overlap(self):
im = np.ones([150, 150])
s = ps.tools.subdivide(im, divs=3, overlap=10)
|
Deleting old tests that relied on the shape of the result, which now fails since the result is a list
|
PMEAL_porespy
|
train
|
91c4e43c8a262a9929e9711c7899163b724109b5
|
diff --git a/worker/worker-execution/score-worker-execution-impl/src/main/java/io/cloudslang/worker/execution/services/StubAplsLicensingServiceImpl.java b/worker/worker-execution/score-worker-execution-impl/src/main/java/io/cloudslang/worker/execution/services/StubAplsLicensingServiceImpl.java
index <HASH>..<HASH> 100644
--- a/worker/worker-execution/score-worker-execution-impl/src/main/java/io/cloudslang/worker/execution/services/StubAplsLicensingServiceImpl.java
+++ b/worker/worker-execution/score-worker-execution-impl/src/main/java/io/cloudslang/worker/execution/services/StubAplsLicensingServiceImpl.java
@@ -31,12 +31,12 @@ public class StubAplsLicensingServiceImpl implements AplsLicensingService {
}
@Override
- public boolean checkoutUiStep(String executionId, String branchId) {
+ public boolean incrementUiStep(String executionId, String branchId) {
return true;
}
@Override
- public void checkinUiStep(String executionId, String branchId) {
+ public void decrementUiStep(String executionId, String branchId) {
}
}
|
US/<I>/merge UI steps from master
|
CloudSlang_score
|
train
|
d7819f42d8230a4e1b76f0b25cab11168b80c144
|
diff --git a/src/main/java/com/microsoft/azure/functions/worker/broker/CoreTypeResolver.java b/src/main/java/com/microsoft/azure/functions/worker/broker/CoreTypeResolver.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/microsoft/azure/functions/worker/broker/CoreTypeResolver.java
+++ b/src/main/java/com/microsoft/azure/functions/worker/broker/CoreTypeResolver.java
@@ -4,7 +4,6 @@ import java.lang.annotation.*;
import java.lang.reflect.*;
import com.microsoft.azure.functions.*;
-import com.microsoft.azure.functions.annotation.*;
public class CoreTypeResolver {
private static boolean isOutputParameter(Type target) {
@@ -52,12 +51,24 @@ public class CoreTypeResolver {
if (annotation.toString().contains("com.microsoft.azure.functions.annotation")) {
annotationName = getBindingNameFromAnnotation(annotation);
}
+
if (annotationName == null) {
- CustomBinding customBindingAnnotation = annotation.annotationType().getAnnotation(CustomBinding.class);
+ Annotation customBindingAnnotation = null;
+ for (Annotation item : annotation.annotationType().getAnnotations()){
+ if (item.annotationType().getName().equals("com.microsoft.azure.functions.annotation.CustomBinding")) {
+ customBindingAnnotation = item;
+ break;
+ }
+ }
if (customBindingAnnotation != null) {
annotationName = getBindingNameFromAnnotation(annotation);
if (annotationName == null) {
- annotationName = getBindingNameFromCustomBindingAnnotation(customBindingAnnotation);
+ try {
+ Method name = customBindingAnnotation.annotationType().getMethod("name");
+ annotationName = getBindingNameFromCustomBindingAnnotation(customBindingAnnotation, name);
+ } catch (NoSuchMethodException ex) {
+ // Ignore
+ }
}
}
}
@@ -79,9 +90,9 @@ public class CoreTypeResolver {
return null;
}
- private static String getBindingNameFromCustomBindingAnnotation(CustomBinding customBindingAnnotation) {
+ private static String getBindingNameFromCustomBindingAnnotation(Annotation customBindingAnnotation, Method name) {
try {
- return customBindingAnnotation.name();
+ return (String) name.invoke(customBindingAnnotation);
} catch (Exception ex) {
// Ignore
return null;
@@ -89,9 +100,22 @@ public class CoreTypeResolver {
}
static String getBindingNameAnnotation(Parameter param) {
- BindingName paramAnnotation = param.getDeclaredAnnotation(BindingName.class);
- if (paramAnnotation != null) {
- return paramAnnotation.value();
+ Annotation bindingNameAnnotation = null;
+ for (Annotation item : param.getAnnotations()){
+ if (item.annotationType().getName().equals("com.microsoft.azure.functions.annotation.BindingName")){
+ bindingNameAnnotation = item;
+ break;
+ }
+ }
+ if (bindingNameAnnotation != null) {
+ String returnValue = null;
+ try {
+ Method value = bindingNameAnnotation.annotationType().getMethod("value");
+ returnValue = (String) value.invoke(bindingNameAnnotation);
+ } catch (Exception ex) {
+ // Ignore
+ }
+ return returnValue;
}
return new String("");
}
|
Remove direct dependency on CustomBinding and BindingName
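
A rough Python analog of the decoupling trick (names assumed for illustration): match the marker type by its fully-qualified name instead of importing it, so there is no hard dependency on the annotation class.

# Resolve a marker object by qualified class name rather than by import.
def find_by_qualified_name(objs, qualified_name):
    for obj in objs:
        cls = type(obj)
        if cls.__module__ + "." + cls.__qualname__ == qualified_name:
            return obj
    return None

class BindingName:               # stand-in for the real annotation type
    value = "trigger"

# __module__ is "__main__" when this sketch runs as a script.
match = find_by_qualified_name([BindingName()], "__main__.BindingName")
print(getattr(match, "value", ""))  # "trigger"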
|
Azure_azure-functions-java-worker
|
train
|
3273a55c31748e2eb42144fcbc90c659a7441a17
|
diff --git a/loky/process_executor.py b/loky/process_executor.py
index <HASH>..<HASH> 100644
--- a/loky/process_executor.py
+++ b/loky/process_executor.py
@@ -378,6 +378,7 @@ def _queue_management_worker(executor_reference,
def shutdown_all_workers():
mp.util.debug("queue management thread shutting down")
+ executor_flags.flag_as_shutting_down()
# This is an upper bound
nb_children_alive = sum(p.is_alive() for p in processes.values())
try:
@@ -453,9 +454,7 @@ def _queue_management_worker(executor_reference,
# Mark the process pool broken so that submits fail right now.
executor = executor_reference()
if executor is not None:
- with executor._shutdown_lock:
- executor._flags.broken = True
- executor._flags.shutdown = True
+ executor._flags.flag_as_broken()
executor = None
# All futures in flight must be marked failed
for work_id, work_item in pending_work_items.items():
@@ -565,6 +564,7 @@ def _management_worker(executor_reference, executor_flags,
elif _is_crashed(call_queue._thread):
executor = executor_reference()
if is_shutting_down():
+ mp.util.debug("shutting down")
return
executor = None
cause_msg = ("The QueueFeederThread was terminated abruptly "
@@ -575,6 +575,7 @@ def _management_worker(executor_reference, executor_flags,
return
executor = executor_reference()
if is_shutting_down():
+ mp.util.debug("shutting down")
return
executor = None
time.sleep(.1)
@@ -586,9 +587,7 @@ def _shutdown_crash(executor_reference, processes, pending_work_items,
"worker processes. " + cause_msg)
executor = executor_reference()
if executor is not None:
- with executor._shutdown_lock:
- executor._flags.broken = True
- executor._flags.shutdown = True
+ executor._flags.flag_as_broken()
executor = None
call_queue.close()
# Terminate remaining workers forcibly: the queues or their
@@ -687,7 +686,6 @@ class ProcessPoolExecutor(_base.Executor):
# Shutdown is a two-step process.
self._flags = _ExecutorFlags()
- self._shutdown_lock = threading.Lock()
self._queue_count = 0
self._pending_work_items = {}
mp.util.debug('PoolProcessExecutor is setup')
@@ -773,7 +771,7 @@ class ProcessPoolExecutor(_base.Executor):
self._start_thread_management_thread()
def submit(self, fn, *args, **kwargs):
- with self._shutdown_lock:
+ with self._flags.shutdown_lock:
if self._flags.broken:
raise BrokenExecutor('A child process terminated abruptly, '
'the process pool is not usable anymore')
@@ -829,9 +827,7 @@ class ProcessPoolExecutor(_base.Executor):
def shutdown(self, wait=True, kill_workers=False):
mp.util.debug('shutting down executor %s' % self)
- with self._shutdown_lock:
- self._flags.shutdown = True
- self._flags.kill_workers = kill_workers
+ self._flags.flag_as_shutting_down(kill_workers)
if self._queue_management_thread:
# Wake up queue management thread
self._wakeup.set()
@@ -870,3 +866,14 @@ class _ExecutorFlags(object):
self.shutdown = False
self.broken = False
self.kill_workers = False
+ self.shutdown_lock = threading.Lock()
+
+ def flag_as_shutting_down(self, kill_workers=False):
+ with self.shutdown_lock:
+ self.shutdown = True
+ self.kill_workers = kill_workers
+
+ def flag_as_broken(self):
+ with self.shutdown_lock:
+ self.shutdown = True
+ self.broken = True
|
FIX better flagging for shutdown/broken
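
The flags object now owns its lock and exposes the two transitions; a condensed sketch of the pattern from the diff:

import threading

class ExecutorFlags:
    """Shutdown/broken state guarded by the lock the flags object owns."""
    def __init__(self):
        self.shutdown = False
        self.broken = False
        self.kill_workers = False
        self.shutdown_lock = threading.Lock()

    def flag_as_shutting_down(self, kill_workers=False):
        with self.shutdown_lock:
            self.shutdown = True
            self.kill_workers = kill_workers

    def flag_as_broken(self):
        with self.shutdown_lock:
            self.shutdown = True
            self.broken = True

flags = ExecutorFlags()
flags.flag_as_broken()
assert flags.shutdown and flags.broken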
|
tomMoral_loky
|
train
|
f1997a7e6c5e92579492e67b79240a26309dce79
|
diff --git a/bundles/org.eclipse.orion.client.ui/web/orion/jslintworker.js b/bundles/org.eclipse.orion.client.ui/web/orion/jslintworker.js
index <HASH>..<HASH> 100644
--- a/bundles/org.eclipse.orion.client.ui/web/orion/jslintworker.js
+++ b/bundles/org.eclipse.orion.client.ui/web/orion/jslintworker.js
@@ -730,7 +730,7 @@ SOFTWARE.
turquoise, type, u, ul, undef, unescape, "unicode-bidi", unused,
unwatch, updateNow, urls, value, valueOf, var, version,
"vertical-align", video, violet, visibility, watch, wheat, white,
- "white-space", whitesmoke, widget, width, windows, "word-spacing",
+ "white-space", whitesmoke, widget, width, window, windows, "word-spacing",
"word-wrap", yahooCheckLogin, yahooLogin, yahooLogout, yellow,
yellowgreen, "z-index"
*/
@@ -868,6 +868,7 @@ var JSLINT = (function () {
setTimeout : false,
status : false,
top : false,
+ window : false,
XMLHttpRequest : false
},
|
add 'window' to set of globals in browser mode
|
eclipse_orion.client
|
train
|
a90e1866b7539b80c7bad05315f7708625d9d365
|
diff --git a/cumulusci/cli/cli.py b/cumulusci/cli/cli.py
index <HASH>..<HASH> 100644
--- a/cumulusci/cli/cli.py
+++ b/cumulusci/cli/cli.py
@@ -372,7 +372,7 @@ def project_init(config, extend):
if extend:
yml_config.append(' dependencies:')
- yml_config.append(' github: {}'.format(extend))
+ yml_config.append(' - github: {}'.format(extend))
# git:
git_config = []
|
- Fix issue with github dependency set by cci project init
- beta<I>
|
SFDO-Tooling_CumulusCI
|
train
|
5c8c875b6191e9f2501f95bd0660cc76b45408b7
|
diff --git a/contract/models/contract.py b/contract/models/contract.py
index <HASH>..<HASH> 100644
--- a/contract/models/contract.py
+++ b/contract/models/contract.py
@@ -183,7 +183,6 @@ class AccountAnalyticAccount(models.Model):
)
):
self[field_name] = self.contract_template_id[field_name]
- self.recurring_invoice_line_ids._onchange_date_start()
@api.onchange('partner_id')
def _onchange_partner_id(self):
@@ -225,6 +224,7 @@ class AccountAnalyticAccount(models.Model):
contract_line
)
new_lines += contract_line_model.new(vals)
+ new_lines._onchange_date_start()
return new_lines
@api.multi
diff --git a/contract/models/contract_line.py b/contract/models/contract_line.py
index <HASH>..<HASH> 100644
--- a/contract/models/contract_line.py
+++ b/contract/models/contract_line.py
@@ -658,8 +658,16 @@ class AccountAnalyticInvoiceLine(models.Model):
rec.cancel()
else:
if not rec.date_end or rec.date_end > date_end:
+ old_date_end = rec.date_end
+ values = {
+ 'date_end': date_end,
+ 'is_auto_renew': False,
+ 'manual_renew_needed': manual_renew_needed,
+ }
+ if rec.last_date_invoiced == date_end:
+ values['recurring_next_date'] = False
+ rec.write(values)
if post_message:
- old_date_end = rec.date_end
msg = _(
"""Contract line for <strong>{product}</strong>
stopped: <br/>
@@ -671,13 +679,6 @@ class AccountAnalyticInvoiceLine(models.Model):
)
)
rec.contract_id.message_post(body=msg)
- rec.write(
- {
- 'date_end': date_end,
- 'is_auto_renew': False,
- "manual_renew_needed": manual_renew_needed,
- }
- )
else:
rec.write(
{
diff --git a/contract/models/res_partner.py b/contract/models/res_partner.py
index <HASH>..<HASH> 100644
--- a/contract/models/res_partner.py
+++ b/contract/models/res_partner.py
@@ -16,14 +16,10 @@ class ResPartner(models.Model):
def _compute_contract_count(self):
contract_model = self.env['account.analytic.account']
- today = fields.Date.today()
fetch_data = contract_model.read_group(
[
('recurring_invoices', '=', True),
('partner_id', 'child_of', self.ids),
- '|',
- ('date_end', '=', False),
- ('date_end', '>=', today),
],
['partner_id', 'contract_type'],
['partner_id', 'contract_type'],
diff --git a/contract/tests/test_contract.py b/contract/tests/test_contract.py
index <HASH>..<HASH> 100644
--- a/contract/tests/test_contract.py
+++ b/contract/tests/test_contract.py
@@ -1807,3 +1807,9 @@ class TestContract(TestContractBase):
self.contract.recurring_invoice_line_ids.cancel()
self.contract.recurring_invoice_line_ids.unlink()
self.assertFalse(self.contract.recurring_create_invoice())
+
+ def test_stop_at_last_date_invoiced(self):
+ self.contract.recurring_create_invoice()
+ self.assertTrue(self.acct_line.recurring_next_date)
+ self.acct_line.stop(self.acct_line.last_date_invoiced)
+ self.assertFalse(self.acct_line.recurring_next_date)
diff --git a/contract/views/contract.xml b/contract/views/contract.xml
index <HASH>..<HASH> 100644
--- a/contract/views/contract.xml
+++ b/contract/views/contract.xml
@@ -204,7 +204,7 @@
<group expand="0" string="Group By...">
<filter name="next_invoice"
string="Next Invoice"
- domain="[('recurring_next_date', '>=', time.strftime('%Y-%m-%d'))]"
+ domain="[('recurring_next_date', '!=', False)]"
context="{'group_by':'recurring_next_date'}"
/>
<filter name="date_end"
|
[FIX] - don't trigger onchange date_start for old lines on contract template change
[FIX] - Fix stop post message
[FIX] - Fix sale_contract_count: it should count all partner contracts
[FIX] - set recurring_next_date to False if contract line stopped at last date invoiced
[FIX] - Group by next_invoice also considers dates in the past
|
OCA_contract
|
train
|
d35d8cce559c7a612b7c8acdf7c6f65e649caece
|
diff --git a/spyder/plugins/ipythonconsole/widgets/figurebrowser.py b/spyder/plugins/ipythonconsole/widgets/figurebrowser.py
index <HASH>..<HASH> 100644
--- a/spyder/plugins/ipythonconsole/widgets/figurebrowser.py
+++ b/spyder/plugins/ipythonconsole/widgets/figurebrowser.py
@@ -24,9 +24,13 @@ class FigureBrowserWidget(RichJupyterWidget):
This widget can also block the plotting of inline figures in the IPython
Console so that figures are only plotted in the plots plugin.
"""
- mute_inline_plotting = None
+ _mute_inline_plotting = None
sended_render_message = False
+ def set_mute_inline_plotting(self, mute_inline_plotting):
+ """Set mute_inline_plotting"""
+ self._mute_inline_plotting = mute_inline_plotting
+
# ---- Private API (overrode by us)
def _handle_display_data(self, msg):
"""
@@ -49,7 +53,7 @@ class FigureBrowserWidget(RichJupyterWidget):
if img is not None:
self.sig_new_inline_figure.emit(img, fmt)
- if self.mute_inline_plotting:
+ if self._mute_inline_plotting:
if not self.sended_render_message:
self._append_html("<br>", before_prompt=True)
self.append_html_message(
diff --git a/spyder/plugins/plots/widgets/figurebrowser.py b/spyder/plugins/plots/widgets/figurebrowser.py
index <HASH>..<HASH> 100644
--- a/spyder/plugins/plots/widgets/figurebrowser.py
+++ b/spyder/plugins/plots/widgets/figurebrowser.py
@@ -203,7 +203,7 @@ class FigureBrowser(QWidget, SpyderWidgetMixin):
elif option == 'mute_inline_plotting':
self.mute_inline_plotting = value
if self.shellwidget:
- self.shellwidget.mute_inline_plotting = value
+ self.shellwidget.set_mute_inline_plotting(value)
elif option == 'show_plot_outline':
self.show_fig_outline_in_viewer(value)
@@ -241,7 +241,7 @@ class FigureBrowser(QWidget, SpyderWidgetMixin):
def set_shellwidget(self, shellwidget):
"""Bind the shellwidget instance to the figure browser"""
self.shellwidget = shellwidget
- self.shellwidget.mute_inline_plotting = self.mute_inline_plotting
+ self.shellwidget.set_mute_inline_plotting(self.mute_inline_plotting)
shellwidget.sig_new_inline_figure.connect(self._handle_new_figure)
def _handle_new_figure(self, fig, fmt):
|
_mute_inline_plotting
|
spyder-ide_spyder
|
train
|
790e6ebdad0568a687f782ae1ecaf7eff8a2d4eb
|
diff --git a/main/main.go b/main/main.go
index <HASH>..<HASH> 100644
--- a/main/main.go
+++ b/main/main.go
@@ -78,7 +78,7 @@ func main() {
theApp := app.NewApp(cmdRunner, cmdFactory.CommandMetadatas()...)
//command `cf` without argument
- if len(os.Args) == 1 {
+ if len(os.Args) == 1 || os.Args[1] == "help" {
theApp.Run(os.Args)
} else if cmdFactory.CheckIfCoreCmdExists(os.Args[1]) {
callCoreCommand(os.Args[0:], theApp)
diff --git a/main/main_test.go b/main/main_test.go
index <HASH>..<HASH> 100644
--- a/main/main_test.go
+++ b/main/main_test.go
@@ -70,7 +70,7 @@ var _ = Describe("main", func() {
It("Calls core cf command if the plugin shares the same name", func() {
output := Cf("help")
- Eventually(output.Out).ShouldNot(Say("You called help in test_1"))
+ Consistently(output.Out, 1).ShouldNot(Say("You called help in test_1"))
})
})
})
|
help will run as a core command instead of calling plugin commands
[Finishes #<I>]
|
cloudfoundry_cli
|
train
|
6472c3581cfbbd1ec90ff1ea111c8439d01b01c6
|
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -22,7 +22,8 @@ function StyleManifest(inputNode, options) {
this.currentTree = new FSTree();
this.styleFiles = {};
- this.outputFileName = options.outputFileName;
+ this.outputFileStem = options.outputFileNameWithoutExtension;
+ this.defaultExtension = options.defaultExtension;
}
StyleManifest.prototype.build = function() {
@@ -74,7 +75,7 @@ StyleManifest.prototype.makeManifest = function() {
for (var file in this.styleFiles[extension]) {
output += this.styleFiles[extension][file] + ';' + os.EOL;
}
- fs.writeFileSync(path.join(this.outputPath, this.outputFileName + extension), output);
+ fs.writeFileSync(path.join(this.outputPath, this.outputFileStem + extension), output);
}
}
@@ -86,7 +87,7 @@ const DUMMY_FILE_COMMENT = '\
StyleManifest.prototype.ensureFile = function() {
if (Object.keys(this.styleFiles).length === 0) {
if (!this.dummyFile) {
- this.dummyFile = path.join(this.outputPath, this.outputFileName + '.css');
+ this.dummyFile = path.join(this.outputPath, this.outputFileStem + '.' + this.defaultExtension);
fs.writeFileSync(this.dummyFile, DUMMY_FILE_COMMENT);
}
} else if (this.dummyFile) {
|
feat(better defaults): allow setting the default extension when no files are passed to the plugin; also explicitly state that the filename should not have an extension
|
webark_broccoli-style-manifest
|
train
|
68fa92051b62bec76da61b4232213b3265130490
|
diff --git a/phoebe-testlib/test_DC/test_dc_with_rv2.py b/phoebe-testlib/test_DC/test_dc_with_rv2.py
index <HASH>..<HASH> 100644
--- a/phoebe-testlib/test_DC/test_dc_with_rv2.py
+++ b/phoebe-testlib/test_DC/test_dc_with_rv2.py
@@ -50,14 +50,26 @@ def get_bundle_with_data_and_initial_guess():
mybundle.add_parameter('asini@orbit', replaces='sma')
# Define priors
- mybundle.set_prior('ecc', distribution='uniform', lower=0, upper=1)
- mybundle.set_prior('per0', distribution='uniform', lower=0, upper=360)
- mybundle.set_prior('vgamma', distribution='uniform', lower=-30, upper=10)
- mybundle.set_prior('incl', distribution='uniform', lower=0, upper=90)
- mybundle.set_prior('q', distribution='uniform', lower=0.5, upper=1)
- mybundle.set_prior('sma', distribution='uniform', lower=8, upper=12)
- mybundle.set_prior('mass1', distribution='uniform', lower=0.8, upper=1.2)
- mybundle.set_prior('asini@orbit', distribution='uniform', lower=0, upper=15)
+    # We define the priors here so that the frontend feedback prints out reasonable
+    # values (not just random numbers) in its output of the DC solution.
+    # These values are not currently being used in the DC fitting algorithm.
+# mybundle.set_prior('ecc', distribution='uniform', lower=0, upper=1)
+# mybundle.set_prior('per0', distribution='uniform', lower=0, upper=360)
+# mybundle.set_prior('vgamma', distribution='uniform', lower=-30, upper=10)
+# mybundle.set_prior('incl', distribution='uniform', lower=0, upper=90)
+# mybundle.set_prior('q', distribution='uniform', lower=0.5, upper=1)
+# mybundle.set_prior('sma', distribution='uniform', lower=8, upper=12)
+# mybundle.set_prior('mass1', distribution='uniform', lower=0.8, upper=1.2)
+# mybundle.set_prior('asini@orbit', distribution='uniform', lower=0, upper=15)
+
+ mybundle.set_prior('ecc', distribution='normal', mu=0.33,sigma=0.01)
+ mybundle.set_prior('per0', distribution='normal', mu=54.0,sigma=0.5)
+ mybundle.set_prior('vgamma', distribution='normal', mu=-11.0,sigma=1.0)
+ mybundle.set_prior('incl', distribution='normal', mu=8.,sigma=4.)
+ mybundle.set_prior('q', distribution='normal', mu=0.67,sigma=0.1)
+ mybundle.set_prior('sma', distribution='normal', mu=10.5,sigma=0.5)
+ #mybundle.set_prior('mass1', distribution='normal', mu=3.0,sigma=8.)
+ #mybundle.set_prior('asini@orbit', distribution='normal', mu=2.0,sigma=9.)
# Mark adjustables
mybundle.set_adjust('ecc')
@@ -133,17 +145,16 @@ if __name__ == "__main__":
initial_text.append(("{:10s} = {:16.8f}".format(twig, init_bundle[twig])))
- feedback = run_dc_backend(system)
-
-
- print("feedback = ",feedback)
+    # Uncomment the following lines if you wish to run the DC algorithm using the backend
+ #feedback = run_dc_backend(system)
+ #print("feedback = ",feedback)
for i,twig in enumerate(['ecc', 'per0', 'vgamma', 'sma', 'incl', 'q']):
print(initial_text[i] +' ---> '+"{:10s} = {:16.8f}".format(twig, init_bundle[twig]))
-
+ # Run DC using the frontend.
init_bundle = get_bundle_with_data_and_initial_guess()
feedback = run_dc_frontend(init_bundle)
|
Updated test_dc_with_rv2.py with some additional comments
|
phoebe-project_phoebe2
|
train
|
550c434aef6bf451705fe668875cee3f7738837b
|
diff --git a/dcard/posts.py b/dcard/posts.py
index <HASH>..<HASH> 100644
--- a/dcard/posts.py
+++ b/dcard/posts.py
@@ -10,8 +10,17 @@ client = Client()
class Post:
- def __init__(self):
- pass
+ def __init__(self, metas):
+ '''
+        params `metas`: list of article_metas/ids, or one article_meta/id;
+        an article_meta must contain an `id` field
+ '''
+ if isinstance(metas, list):
+ first = metas[0]
+ ids = [meta['id'] for meta in metas] if isinstance(first, dict) else metas
+ else:
+ ids = [metas['id']] if isinstance(metas, dict) else [metas]
+ self.ids = ids
@staticmethod
def build_url(post_id):
@@ -46,10 +55,8 @@ class Post:
return comments
- @staticmethod
- def _get(post_ids, **kwargs):
-
- post_urls = [Post.build_url(i) for i in post_ids]
+ def get(self, **kwargs):
+ post_urls = [Post.build_url(i) for i in self.ids]
crawl_links = kwargs.get('links', True)
crawl_content = kwargs.get('content', True)
@@ -71,19 +78,4 @@ class Post:
for i, url in enumerate(post_urls):
results[i]['comments'] = Post.get_comments(url)
- return results
-
- @staticmethod
- def get(post_meta=None, post_id=None, **kwargs):
- ids = []
- if post_meta:
- if isinstance(post_meta, list):
- ids = [m['id'] for m in post_meta]
- else:
- ids = [post_meta['id']]
- if post_id:
- if isinstance(post_id, list):
- ids = post_id
- else:
- ids = [post_id]
- return Post._get(ids, **kwargs)
+ return results[0] if len(results) == 1 else results
diff --git a/spider.py b/spider.py
index <HASH>..<HASH> 100644
--- a/spider.py
+++ b/spider.py
@@ -15,6 +15,6 @@ if __name__ == '__main__':
print(len(ids))
t = time.time()
- articles = dcard.posts.get(post_id=ids[:10], comments=False)
+ articles = dcard.posts(ids[:15]).get(comments=False)
print('{:.5f}'.format(time.time() - t))
print(len(articles))
diff --git a/tests/test_dcard.py b/tests/test_dcard.py
index <HASH>..<HASH> 100644
--- a/tests/test_dcard.py
+++ b/tests/test_dcard.py
@@ -41,6 +41,6 @@ def test_post_ids(forums):
def test_post_bundle():
- post = Dcard.posts.get({'id': 224341009})[0]
+ post = Dcard.posts(224341009).get()
comment_count = post['content']['commentCount']
assert comment_count == len(post['comments'])
|
Refactor the posts class inner interface
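
The constructor's normalization rule, extracted as a standalone sketch:

def normalize_ids(metas):
    """Accept a list of metas/ids or a single meta/id; return a list of ids."""
    if isinstance(metas, list):
        first = metas[0]
        return [m["id"] for m in metas] if isinstance(first, dict) else list(metas)
    return [metas["id"]] if isinstance(metas, dict) else [metas]

assert normalize_ids(224341009) == [224341009]
assert normalize_ids([{"id": 1}, {"id": 2}]) == [1, 2]
assert normalize_ids([3, 4]) == [3, 4]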
|
leVirve_dcard-spider
|
train
|
baed22bb1c3a940d3cacecec51e4df19f6c28541
|
diff --git a/lib/moodlelib.php b/lib/moodlelib.php
index <HASH>..<HASH> 100644
--- a/lib/moodlelib.php
+++ b/lib/moodlelib.php
@@ -3978,6 +3978,7 @@ function &get_mailer($action='get') {
*
* @uses $CFG
* @uses $FULLME
+ * @uses $MNETIDPJUMPURL IdentityProvider (IDP) URL the user hits to jump to the mnet peer.
* @uses SITEID
* @param user $user A {@link $USER} object
* @param user $from A {@link $USER} object
@@ -3994,7 +3995,7 @@ function &get_mailer($action='get') {
*/
function email_to_user($user, $from, $subject, $messagetext, $messagehtml='', $attachment='', $attachname='', $usetrueaddress=true, $replyto='', $replytoname='', $wordwrapwidth=79) {
- global $CFG, $FULLME, $IDPJUMPURL;
+ global $CFG, $FULLME, $MNETIDPJUMPURL;
static $mnetjumps = array();
if (empty($user)) {
@@ -4027,12 +4028,12 @@ function email_to_user($user, $from, $subject, $messagetext, $messagehtml='', $a
require_once($CFG->dirroot.'/mnet/lib.php');
// Form the request url to hit the idp's jump.php
if (isset($mnetjumps[$user->mnethostid])) {
- $IDPJUMPURL = $mnetjumps[$user->mnethostid];
+ $MNETIDPJUMPURL = $mnetjumps[$user->mnethostid];
} else {
$idp = mnet_get_peer_host($user->mnethostid);
$idpjumppath = mnet_get_app_jumppath($idp->applicationid);
- $IDPJUMPURL = $idp->wwwroot . $idpjumppath . '?hostwwwroot=' . $CFG->wwwroot . '&wantsurl=';
- $mnetjumps[$user->mnethostid] = $IDPJUMPURL;
+ $MNETIDPJUMPURL = $idp->wwwroot . $idpjumppath . '?hostwwwroot=' . $CFG->wwwroot . '&wantsurl=';
+ $mnetjumps[$user->mnethostid] = $MNETIDPJUMPURL;
}
$messagetext = preg_replace_callback("%($CFG->wwwroot[^[:space:]]*)%",
diff --git a/mnet/lib.php b/mnet/lib.php
index <HASH>..<HASH> 100644
--- a/mnet/lib.php
+++ b/mnet/lib.php
@@ -584,14 +584,22 @@ function mnet_get_peer_host ($mnethostid) {
* log in at their mnet identity provider (if they are not already logged in)
* before ultimately being directed to the original url.
*
- * uses global IDPJUMPURL - the url which user should initially be directed to
+ * uses global MNETIDPJUMPURL - the url which the user should initially be directed to.
+ * MNETIDPJUMPURL is a URL associated with a moodle networking peer when it
+ * is fulfilling a role as an identity provider (IDP). Different urls for
+ * different peers; the jumpurl is formed partly from the IDP's webroot, and
+ * partly from a predefined local path within that webroot.
+ * The result of the user hitting MNETIDPJUMPURL is that they will be asked
+ * to login at their identity provider (if they aren't already), mnet
+ * will prepare the necessary authentication information, then redirect
+ * them back to somewhere at the content provider (CP) moodle (this moodle).
* @param array $url array with 2 elements
* 0 - context the url was taken from, possibly just the url, possibly href="url"
* 1 - the destination url
* @return string the url the remote user should be supplied with.
*/
function mnet_sso_apply_indirection ($url) {
- global $IDPJUMPURL;
+ global $MNETIDPJUMPURL;
$localpart='';
$urlparts = parse_url($url[1]);
@@ -606,7 +614,7 @@ function mnet_sso_apply_indirection ($url) {
$localpart .= '#'.$urlparts['fragment'];
}
}
- $indirecturl = $IDPJUMPURL . urlencode($localpart);
+ $indirecturl = $MNETIDPJUMPURL . urlencode($localpart);
//If we matched on more than just a url (ie an html link), return the url to an href format
if ($url[0] != $url[1]) {
$indirecturl = 'href="'.$indirecturl.'"';
|
MNET-<I> Additional commenting and variable name change
|
moodle_moodle
|
train
|
67c737f325373f9486647069a56d4a795475e1cd
|
diff --git a/src/modules/Dropdown/Dropdown.js b/src/modules/Dropdown/Dropdown.js
index <HASH>..<HASH> 100644
--- a/src/modules/Dropdown/Dropdown.js
+++ b/src/modules/Dropdown/Dropdown.js
@@ -447,6 +447,7 @@ export default class Dropdown extends Component {
document.addEventListener('click', this.closeOnDocumentClick)
document.removeEventListener('keydown', this.openOnArrow)
document.removeEventListener('keydown', this.openOnSpace)
+ this.scrollSelectedItemIntoView()
} else if (prevState.open && !this.state.open) {
debug('dropdown closed')
this.handleClose()
@@ -939,8 +940,11 @@ export default class Dropdown extends Component {
scrollSelectedItemIntoView = () => {
debug('scrollSelectedItemIntoView()')
+ if (!this.ref) return
const menu = this.ref.querySelector('.menu.visible')
+ if (!menu) return
const item = menu.querySelector('.item.selected')
+ if (!item) return
debug(`menu: ${menu}`)
debug(`item: ${item}`)
const isOutOfUpperView = item.offsetTop < menu.scrollTop
@@ -962,6 +966,7 @@ export default class Dropdown extends Component {
if (onOpen) onOpen(e, this.props)
this.trySetState({ open: true })
+ this.scrollSelectedItemIntoView()
}
close = (e) => {
diff --git a/test/specs/modules/Dropdown/Dropdown-test.js b/test/specs/modules/Dropdown/Dropdown-test.js
index <HASH>..<HASH> 100644
--- a/test/specs/modules/Dropdown/Dropdown-test.js
+++ b/test/specs/modules/Dropdown/Dropdown-test.js
@@ -1066,6 +1066,20 @@ describe('Dropdown', () => {
.setProps({ open: true })
dropdownMenuIsOpen()
})
+ it('calls scrollSelectedItemIntoView when changed from false to true', () => {
+ wrapperMount(<Dropdown options={options} selection open={false} />)
+
+ const instance = wrapper.instance()
+ sandbox.spy(instance, 'scrollSelectedItemIntoView')
+
+ instance.scrollSelectedItemIntoView
+ .should.not.have.been.called()
+
+ wrapper.setProps({ open: true })
+
+ instance.scrollSelectedItemIntoView
+ .should.have.been.calledOnce()
+ })
})
describe('multiple', () => {
|
fix(Dropdown): make active dropdown item scroll into view on open (#<I>)
* fix: make active dropdown item scroll into view (#<I>)
* fix: check this.ref, menu, item in scrollSelectedItemIntoView method
* test(Dropdown): scroll item into view on open
|
Semantic-Org_Semantic-UI-React
|
train
|
4031968eb1be82c48ac8f7c6b51a2988d4b62a5f
|
diff --git a/lim/genetics/qtl/test/test_qtl.py b/lim/genetics/qtl/test/test_qtl.py
index <HASH>..<HASH> 100644
--- a/lim/genetics/qtl/test/test_qtl.py
+++ b/lim/genetics/qtl/test/test_qtl.py
@@ -58,6 +58,8 @@ def test_qtl_normal_scan():
#
# X[:] = 1
# qtl = normal_scan(y, X, G=G, progress=False)
+# for p in qtl.pvalues():
+# print(p)
def test_qtl_binomial_scan():
@@ -94,5 +96,31 @@ def test_qtl_binomial_scan():
rtol=1e-5)
+def test_qtl_binomial_scan_covariate_redundance():
+ random = RandomState(9)
+
+ N = 50
+ G = random.randn(N, N + 100)
+ G = stdnorm(G, 0)
+ G /= sqrt(G.shape[1])
+
+ p = 5
+ X = random.randn(N, p)
+ X = stdnorm(X, 0)
+ X /= sqrt(X.shape[1])
+
+ ntrials = random.randint(1, 50, N)
+ nsuccesses = binomial(
+ ntrials,
+ -0.1,
+ G,
+ causal_variants=X,
+ causal_variance=0.1,
+ random_state=random)
+
+ X[:] = 1
+ qtl = binomial_scan(nsuccesses, ntrials, X, G=G, progress=False)
+ assert_allclose(qtl.pvalues(), [1] * p)
+
if __name__ == '__main__':
__import__('pytest').main([__file__, '-s'])
diff --git a/lim/inference/ep/fixed.py b/lim/inference/ep/fixed.py
index <HASH>..<HASH> 100644
--- a/lim/inference/ep/fixed.py
+++ b/lim/inference/ep/fixed.py
@@ -44,7 +44,7 @@ class FixedEP(object):
denom = row00 * row11
denom -= row01**2
- with errstate(divide='ignore'):
+ with errstate(divide='ignore', invalid='ignore'):
betas0 /= denom
betas1 /= denom
|
Fixed EP for covariate redundancy
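
The errstate change additionally silences 0/0 ("invalid") warnings, not just x/0 ("divide"); a minimal numpy illustration:

import numpy as np

betas = np.array([0.0, 1.0])
denom = np.array([0.0, 0.0])  # redundant covariates can zero the denominator

with np.errstate(divide="ignore", invalid="ignore"):
    out = betas / denom   # 0/0 -> nan, 1/0 -> inf, with no runtime warning
print(out)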
|
limix_lim
|
train
|
99a2123977058f2617beabc9438a01445a2767e2
|
diff --git a/lib/uxml/tree.py b/lib/uxml/tree.py
index <HASH>..<HASH> 100644
--- a/lib/uxml/tree.py
+++ b/lib/uxml/tree.py
@@ -22,7 +22,7 @@ class node(object):
class element(node):
def __init__(self, name, attrs=None, parent=None):#, ancestors=None):
self.xml_name = name
- self.xml_attrs = attrs or {}
+ self.xml_attributes = attrs or {}
self.xml_parent = parent
self.xml_children = []
#self.xml_ancestors = ancestors or []
@@ -41,6 +41,10 @@ class element(node):
strbits.extend(['</', self.xml_name, '>'])
return ''.join(strbits)
+ @property
+ def xml_value(self):
+ return ''.join(map(lambda x: x.xml_value, self.xml_children))
+
def __repr__(self):
return u'<uxml.element ({0}) "{1}" with {2} children>'.format(hash(self), self.xml_name, len(self.xml_children))
@@ -59,6 +63,10 @@ class text(node, str):
def xml_encode(self):
return str(self)
+ @property
+ def xml_value(self):
+ return str(self)
+
#def unparse(self):
# return '<' + self.name.encode('utf-8') + unparse_attrmap(self.attrmap) + '>'
|
Fixes to uxml.tree
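
The new xml_value property concatenates descendant text recursively; a self-contained sketch of the same shape:

class Element:
    def __init__(self, children=None):
        self.xml_children = children or []

    @property
    def xml_value(self):
        # Concatenate the values of all children, depth-first.
        return "".join(child.xml_value for child in self.xml_children)

class Text(str):
    @property
    def xml_value(self):
        return str(self)

tree = Element([Text("Hello, "), Element([Text("world")]), Text("!")])
assert tree.xml_value == "Hello, world!"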
|
uogbuji_amara3-xml
|
train
|
d06b64d4b9e8203945ad946495ff300d9852b37c
|
diff --git a/lib/utils/fs/safe-move-file.js b/lib/utils/fs/safe-move-file.js
index <HASH>..<HASH> 100644
--- a/lib/utils/fs/safe-move-file.js
+++ b/lib/utils/fs/safe-move-file.js
@@ -1,6 +1,7 @@
'use strict';
const fsp = require('fs').promises;
+const fse = require('fs-extra');
const crypto = require('crypto');
const path = require('path');
@@ -43,11 +44,11 @@ async function safeMoveFile(oldPath, newPath) {
const tempPath = generateTemporaryPathOnDestinationDevice(newPath);
// Copy onto the destination filesystem (not guaranteed to be atomic)
- await fsp.copyFile(oldPath, tempPath);
+ await fse.copy(oldPath, tempPath);
// Atomically move the file onto the destination path, overwriting it
await fsp.rename(tempPath, newPath);
// Delete the old file once both the above operations succeed
- await fsp.unlink(oldPath);
+ await fse.remove(oldPath);
} else {
throw err;
}
|
fix: Properly move directories across filesystems
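
The same safe-move recipe in Python, as an illustrative file-only sketch (the fix above additionally handles directories via fs-extra): copy to a temporary path on the destination filesystem, atomically replace, then delete the source.

import os
import shutil
import uuid

def safe_move(old_path, new_path):
    """Move a file to new_path, surviving cross-filesystem boundaries."""
    try:
        os.rename(old_path, new_path)       # atomic when on the same device
    except OSError:                         # e.g. EXDEV: cross-device link
        tmp = new_path + "." + uuid.uuid4().hex  # temp name on the destination device
        shutil.copy2(old_path, tmp)         # copy is not guaranteed atomic
        os.replace(tmp, new_path)           # atomic rename over the target
        os.remove(old_path)                 # drop the source only on success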
|
serverless_serverless
|
train
|
334ebce0589ced0202addb23031830bc3325ac96
|
diff --git a/simuvex/procedures/syscalls/__init__.py b/simuvex/procedures/syscalls/__init__.py
index <HASH>..<HASH> 100644
--- a/simuvex/procedures/syscalls/__init__.py
+++ b/simuvex/procedures/syscalls/__init__.py
@@ -9,7 +9,7 @@ syscall_table['AMD64'][3] = 'close'
syscall_table['AMD64'][4] = 'stat'
syscall_table['AMD64'][5] = 'fstat'
syscall_table['AMD64'][6] = 'stat'
-syscall_table['AMD64'][9] = 'mtable'
+syscall_table['AMD64'][9] = 'mmap'
syscall_table['AMD64'][60] = 'exit'
syscall_table['AMD64'][231] = 'exit' # really exit_group, but close enough
|
export the syscall table to a separate file from the handler
|
angr_angr
|
train
|
a3ce1fe7e052489f50285b4689b472d5e4eebd4e
|
diff --git a/lib/dynode/client.js b/lib/dynode/client.js
index <HASH>..<HASH> 100644
--- a/lib/dynode/client.js
+++ b/lib/dynode/client.js
@@ -170,7 +170,7 @@ Client.prototype.batchGetItem = function(options, cb) {
var responses = {};
var meta = {UnprocessedKeys: resp.UnprocessedKeys, ConsumedCapacityUnits: {}};
for (var table in resp.Responses) {
- meta.ConsumedCapacityUnits[table] = {ConsumedCapacityUnits: resp.Responses[table].ConsumedCapacityUnits};
+ meta.ConsumedCapacityUnits[table] = resp.Responses[table].ConsumedCapacityUnits;
responses[table] = resp.Responses[table].Items.map(Types.parse)
}
return cb(null, responses, meta);
diff --git a/test/unit/client-test.js b/test/unit/client-test.js
index <HASH>..<HASH> 100644
--- a/test/unit/client-test.js
+++ b/test/unit/client-test.js
@@ -429,6 +429,50 @@ describe("DynamoDB Client unit tests", function(){
client.batchGetItem(options, done);
});
+ it("should parse returned response to json", function(done){
+ var response = {
+ Responses :
+ { Table1 : {
+ Items:[
+ {"name": {"S":"Bob"},"Age": {"N":"22"} },
+ {"name": {"S":"Dan"},"Age": {"N":"66"} }
+ ],
+ ConsumedCapacityUnits : 1
+ },
+ Table2 : {
+ Items:[
+ {"brand": {"S":"Nike"},"price": {"N":"33.99"} },
+ {"brand": {"S":"Adidas"},"price": {"N":"22.99"} }
+ ],
+ ConsumedCapacityUnits : 4
+ }
+ }};
+
+ client._request = function(action, options, cb) {
+ cb(null, response);
+ };
+
+ var options = {
+ "Table1": {keys:[{hash: "blah"}, {hash: "moreBlah"}]},
+ "Table2": {keys:[{hash: "anotherKey", range: 123}]}
+ }
+
+ client.batchGetItem(options, function(err, resp, meta){
+ resp.Table1.should.have.lengthOf(2);
+ resp.Table1[0].should.eql({name: 'Bob', Age: 22});
+ resp.Table1[1].should.eql({name: 'Dan', Age: 66});
+
+ resp.Table2.should.have.lengthOf(2);
+ resp.Table2[0].should.eql({brand: 'Nike', price: 33.99});
+ resp.Table2[1].should.eql({brand: 'Adidas', price: 22.99});
+
+ meta.ConsumedCapacityUnits.Table1.should.equal(1);
+ meta.ConsumedCapacityUnits.Table2.should.equal(4);
+
+ done();
+ });
+ });
+
});
});
\ No newline at end of file
|
testing parsing response for get batch item
|
Wantworthy_dynode
|
train
|
2be0862cd6dc3ebb422eabbe459067446edfb614
|
diff --git a/chef/lib/chef/node/attribute.rb b/chef/lib/chef/node/attribute.rb
index <HASH>..<HASH> 100644
--- a/chef/lib/chef/node/attribute.rb
+++ b/chef/lib/chef/node/attribute.rb
@@ -22,7 +22,7 @@ require 'chef/log'
class Chef
class Node
class Attribute
- attr_accessor :attribute, :default, :override, :state, :current_attribute, :current_default, :current_override, :auto_vivifiy_on_read, :set_unless_value_present
+ attr_accessor :attribute, :default, :override, :state, :current_attribute, :current_default, :current_override, :auto_vivifiy_on_read, :set_unless_value_present, :has_been_read
def initialize(attribute, default, override, state=[])
@attribute = attribute
@@ -34,6 +34,7 @@ class Chef
@state = state
@auto_vivifiy_on_read = false
@set_unless_value_present = false
+ @has_been_read = false
end
# Reset our internal state to the top of every tree
@@ -41,13 +42,17 @@ class Chef
@current_attribute = @attribute
@current_default = @default
@current_override = @override
+ @has_been_read = false
@state = []
end
def [](key)
-
@state << key
+      # We set this so that we can cope with ||= as a setting.
+ # See the comments in []= for more details.
+ @has_been_read = true
+
o_value = value_or_descend(current_override, key, auto_vivifiy_on_read)
a_value = value_or_descend(current_attribute, key, auto_vivifiy_on_read)
d_value = value_or_descend(current_default, key, auto_vivifiy_on_read)
@@ -170,6 +175,14 @@ class Chef
end
end
+ # If we have been read, and the key we are writing is the same
+      # as our parent, we have most likely been ||='ed. So we need to
+ # just rewind a bit.
+ #
+ # In practice, these objects are single use - this is just
+ # supporting one more single-use style.
+ @state.pop if @has_been_read && @state.last == key
+
set_value(@attribute, key, value)
set_value(@override, key, value)
value
@@ -203,7 +216,7 @@ class Chef
def auto_vivifiy(data_hash, key)
if data_hash.has_key?(key)
unless data_hash[key].respond_to?(:has_key?)
- raise ArgumentError, "You tried to set a nested key, where the parent is not a hash-like object." unless auto_vivifiy_on_read
+ raise ArgumentError, "You tried to set a nested key, where the parent is not a hash-like object: #{@state.join("/")}/#{key} " unless auto_vivifiy_on_read
end
else
data_hash[key] = Mash.new
diff --git a/chef/spec/unit/node/attribute_spec.rb b/chef/spec/unit/node/attribute_spec.rb
index <HASH>..<HASH> 100644
--- a/chef/spec/unit/node/attribute_spec.rb
+++ b/chef/spec/unit/node/attribute_spec.rb
@@ -369,6 +369,12 @@ describe Chef::Node::Attribute do
@attributes["fire"] = "secret life"
@attributes["fire"].should == "still burn"
end
+
+ it "should write to an attribute that has been read before properly" do
+ @attributes["foo"] = Mash.new
+ @attributes["foo"]["bar"] ||= "stop the world"
+ @attributes["foo"]["bar"].should == "stop the world"
+ end
end
describe "get_value" do
|
Fixing CHEF-<I>, adding support for ||= in Chef::Node::Attribute objects
|
chef_chef
|
train
|
179b371f0535ea61f107e64ebc43e9ff4024184b
|
diff --git a/app/controllers/api/foreman/smart_proxies_controller.rb b/app/controllers/api/foreman/smart_proxies_controller.rb
index <HASH>..<HASH> 100644
--- a/app/controllers/api/foreman/smart_proxies_controller.rb
+++ b/app/controllers/api/foreman/smart_proxies_controller.rb
@@ -17,13 +17,13 @@ class Api::Foreman::SmartProxiesController < Api::Foreman::SimpleCrudController
A smart proxy is an autonomous web-based foreman component that is placed on
a host performing a specific function in the host commissioning phase.
It receives requests from Foreman to perform operations that are required
- during the commissioning process and executes them on its behalf.
+ during the commissioning process and executes them on its behalf.
More details can be found on the Foreman Architecture page.
- To fully manage the commissioning process then a smart proxy
- will have to manipulate these services, DHCP, DNS, Puppet CA, Puppet and TFTP.
- These services may exist on separate machines or several of them may be hosted
- on the same machine. As each smart proxy instance is capable of managing all
+ To fully manage the commissioning process then a smart proxy
+ will have to manipulate these services, DHCP, DNS, Puppet CA, Puppet and TFTP.
+ These services may exist on separate machines or several of them may be hosted
+ on the same machine. As each smart proxy instance is capable of managing all
of these services, there is only need for one proxy per host.
The Domains API is available only if support for Foreman is installed.
@@ -39,6 +39,10 @@ class Api::Foreman::SmartProxiesController < Api::Foreman::SimpleCrudController
super params.slice('order', 'search')
end
+ def show_json_options
+ { :only => [:name, :url, :features] }
+ end
+
api :GET, "/smart_proxies/:id/", "Show a smart proxy."
param :id, String, "domain name (no slashes)"
def show
|
smart proxies - listing available features in cli info
|
Katello_katello
|
train
|
91d44085a803c97f48bfa9cb4f3f3af9cb0b2a5a
|
diff --git a/hydra-spring/src/test/java/de/escalon/hypermedia/spring/HydraMessageConverterTest.java b/hydra-spring/src/test/java/de/escalon/hypermedia/spring/HydraMessageConverterTest.java
index <HASH>..<HASH> 100644
--- a/hydra-spring/src/test/java/de/escalon/hypermedia/spring/HydraMessageConverterTest.java
+++ b/hydra-spring/src/test/java/de/escalon/hypermedia/spring/HydraMessageConverterTest.java
@@ -57,6 +57,8 @@ public class HydraMessageConverterTest {
public static final Logger LOG = LoggerFactory.getLogger(HydraMessageConverterTest.class);
+ public static final MediaType APPLICATION_JSONLD = MediaType.parseMediaType("application/ld+json");
+
@Configuration
@EnableWebMvc
static class WebConfig extends WebMvcConfigurerAdapter {
@@ -88,7 +90,7 @@ public class HydraMessageConverterTest {
objectMapper.registerModule(module);
converter.setObjectMapper(objectMapper);
converter.setSupportedMediaTypes(
- Arrays.asList(MediaType.parseMediaType("application/ld+json")));
+ Arrays.asList(APPLICATION_JSONLD));
return converter;
}
}
@@ -106,10 +108,10 @@ public class HydraMessageConverterTest {
@Test
public void convertsResource() throws Exception {
final MvcResult result = this.mockMvc.perform(MockMvcRequestBuilders.get("/events/1")
- .accept(MediaType.APPLICATION_JSON))
+ .accept(APPLICATION_JSONLD))
.andExpect(MockMvcResultMatchers.status()
.isOk())
- .andExpect(content().contentType("application/json;charset=UTF-8"))
+ .andExpect(content().contentType("application/ld+json"))
.andExpect(jsonPath("$.@type").value("Event"))
.andExpect(jsonPath("$.performer").value("Cornelia Bielefeldt"))
.andExpect(jsonPath("$.reviews.@id").value("http://localhost/reviews"))
@@ -121,10 +123,10 @@ public class HydraMessageConverterTest {
@Test
public void convertsResourceSupport() throws Exception {
final MvcResult result = this.mockMvc.perform(MockMvcRequestBuilders.get("/events/resourcesupport/1")
- .accept(MediaType.APPLICATION_JSON))
+ .accept(APPLICATION_JSONLD))
.andExpect(MockMvcResultMatchers.status()
.isOk())
- .andExpect(content().contentType("application/json;charset=UTF-8"))
+ .andExpect(content().contentType("application/ld+json"))
.andExpect(jsonPath("$.@type").value("Event"))
.andExpect(jsonPath("$.performer").value("Cornelia Bielefeldt"))
.andExpect(jsonPath("$.reviews.@id").value("http://localhost/reviews"))
@@ -136,7 +138,7 @@ public class HydraMessageConverterTest {
@Test
public void convertsListOfResourceOfEvent() throws Exception {
final MvcResult result = this.mockMvc.perform(MockMvcRequestBuilders.get("/events/list")
- .accept(MediaType.APPLICATION_JSON))
+ .accept(APPLICATION_JSONLD))
.andExpect(MockMvcResultMatchers.status()
.isOk())
.andExpect(jsonPath("$.[0].@type").value("Event"))
@@ -150,10 +152,10 @@ public class HydraMessageConverterTest {
@Test
public void convertsResources() throws Exception {
final MvcResult result = this.mockMvc.perform(MockMvcRequestBuilders.get("/events")
- .accept(MediaType.APPLICATION_JSON))
+ .accept(APPLICATION_JSONLD))
.andExpect(MockMvcResultMatchers.status()
.isOk())
- .andExpect(content().contentType("application/json;charset=UTF-8"))
+ .andExpect(content().contentType("application/ld+json"))
.andExpect(jsonPath("$.@type").value("hydra:Collection"))
.andExpect(jsonPath("$.['hydra:member'][0].@type").value("Event"))
.andExpect(jsonPath("$.['hydra:member'][0].performer").value("Walk off the Earth"))
|
Fixed test after introduction of mediaType ld+json
|
dschulten_hydra-java
|
train
|
6e17647bd6365929a9c289ccb65307a52a8c515d
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -7,7 +7,7 @@ import sys
kernel_json = {"argv":["sys.executable","-m","sas_kernel", "-f", "{connection_file}"],
"display_name":"SAS",
- "codemirror_mode":"shell",
+ "codemirror_mode":"sql",
#"env":{"PS1": "$"},
"language":"sas"
}
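
kernel_json is a standard Jupyter kernel spec; codemirror_mode only steers front-end syntax highlighting and has no effect on execution. A sketch of materializing the spec as a kernel.json file, using the actual interpreter path where the source keeps the literal string "sys.executable":

import json
import sys

kernel_json = {
    "argv": [sys.executable, "-m", "sas_kernel", "-f", "{connection_file}"],
    "display_name": "SAS",
    "codemirror_mode": "sql",  # highlighting hint only
    "language": "sas",
}

with open("kernel.json", "w") as f:
    json.dump(kernel_json, f, indent=2)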
|
update the codemirror_mode to sql to see if that controls the ipython syntax coloring
|
sassoftware_sas_kernel
|
train
|
81c1f1594e513f0a50125a808c93d370f348b5e2
|
diff --git a/dev/com.ibm.ws.concurrent.persistent_fat_configupdate/fat/src/com/ibm/ws/concurrent/persistent/fat/configupd/PersistentExecutorConfigUpdateTest.java b/dev/com.ibm.ws.concurrent.persistent_fat_configupdate/fat/src/com/ibm/ws/concurrent/persistent/fat/configupd/PersistentExecutorConfigUpdateTest.java
index <HASH>..<HASH> 100755
--- a/dev/com.ibm.ws.concurrent.persistent_fat_configupdate/fat/src/com/ibm/ws/concurrent/persistent/fat/configupd/PersistentExecutorConfigUpdateTest.java
+++ b/dev/com.ibm.ws.concurrent.persistent_fat_configupdate/fat/src/com/ibm/ws/concurrent/persistent/fat/configupd/PersistentExecutorConfigUpdateTest.java
@@ -117,7 +117,10 @@ public class PersistentExecutorConfigUpdateTest {
@AfterClass
public static void tearDown() throws Exception {
if (server != null && server.isStarted()) {
- server.stopServer("CWNEN1000E");
+ server.stopServer(
+ "CWNEN1000E",
+ "CWWKC1556W" // Execution of tasks deferred during config update
+ );
}
}
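
stopServer takes an allow-list of warning codes that may appear in the logs without failing the test. A small Python sketch of that allow-list pattern; the "CODE: message" log format is an assumption for illustration:

EXPECTED = {"CWNEN1000E", "CWWKC1556W"}  # tolerated during shutdown

def unexpected_warnings(log_lines):
    # Keep any message whose leading code is not explicitly expected.
    return [line for line in log_lines
            if line.split(":", 1)[0] not in EXPECTED]

assert unexpected_warnings(["CWWKC1556W: tasks deferred"]) == []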
|
Issue #<I> PersistentExecutorConfigUpdateTest should expect CWWKC<I>W
|
OpenLiberty_open-liberty
|
train
|
346fb186925575c4fcbd9b1cab6e83da62dec48e
|
diff --git a/lib/right_develop/ci/util.rb b/lib/right_develop/ci/util.rb
index <HASH>..<HASH> 100644
--- a/lib/right_develop/ci/util.rb
+++ b/lib/right_develop/ci/util.rb
@@ -1,3 +1,7 @@
+if RUBY_VERSION =~ /^1\.8/
+ require 'iconv'
+end
+
module RightDevelop::CI
module Util
module_function
@@ -37,14 +41,18 @@ module RightDevelop::CI
result
end
- # Strip invalid UTF-8 characters from a string. If test output contains weird characters,
+ # Strip invalid UTF-8 sequences from a string. If test output contains weird data,
# we could end up generating invalid JUnit XML which will choke Java. Preserve the purity of
# essence of our precious XML fluids!
+ #
+ # @return [String] the input with all invalid UTF-8 replaced by the empty string
+ # @param [String] untrusted a string (of any encoding) that might contain invalid UTF-8 sequences
def purify(untrusted)
if RUBY_VERSION =~ /^1\.8/
- untrusted.unpack('C*').pack('U*')
+ iconv = Iconv.new('UTF-8//IGNORE', 'UTF-8')
+ iconv.iconv(untrusted)
else
- untrusted.force_encoding(Encoding::BINARY).encode('UTF-8', :undef=>:replace, :replace=>'?')
+ untrusted.force_encoding(Encoding::BINARY).encode('UTF-8', :undef=>:replace, :replace=>'')
end
end
end
diff --git a/spec/right_develop/ci/util_spec.rb b/spec/right_develop/ci/util_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/right_develop/ci/util_spec.rb
+++ b/spec/right_develop/ci/util_spec.rb
@@ -25,9 +25,9 @@ describe RightDevelop::CI::Util do
it 'strips invalid UTF-8' do
result = subject.purify(bad_utf8)
if RUBY_VERSION =~ /^1\.8/
- expect(result).to eq "hello\303\201world"
+ expect(result).to eq "helloworld"
else
- expect(result).to eq 'hello?world'
+ expect(result).to eq 'helloworld'
end
end
end
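
After this change, both Ruby branches drop, rather than replace, any bytes that cannot survive the trip to UTF-8. A rough Python analogue that likewise strips all 8-bit characters so the generated JUnit XML stays valid:

def purify(untrusted: bytes) -> str:
    # Decode as ASCII and silently drop everything outside it, mirroring
    # the "avoid 8-bit characters entirely" behaviour of the Ruby helper.
    return untrusted.decode("ascii", errors="ignore")

assert purify(b"hello\xc3\x81world") == "helloworld"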
|
Purify using iconv to avoid 8-bit characters entirely
|
rightscale_right_develop
|
train
|
def638ff880d368e5c4cf9bb440b873df1656a9b
|
diff --git a/lib/Doctrine/DBAL/Driver/OCI8/OCI8Statement.php b/lib/Doctrine/DBAL/Driver/OCI8/OCI8Statement.php
index <HASH>..<HASH> 100644
--- a/lib/Doctrine/DBAL/Driver/OCI8/OCI8Statement.php
+++ b/lib/Doctrine/DBAL/Driver/OCI8/OCI8Statement.php
@@ -226,7 +226,6 @@ class OCI8Statement implements \IteratorAggregate, Statement
if ( ! isset(self::$fetchStyleMap[$fetchStyle])) {
throw new \InvalidArgumentException("Invalid fetch style: " . $fetchStyle);
}
-<<<<<<< HEAD
$result = array();
if (self::$fetchStyleMap[$fetchStyle] === OCI_BOTH) {
@@ -234,22 +233,17 @@ class OCI8Statement implements \IteratorAggregate, Statement
$result[] = $row;
}
} else {
- oci_fetch_all($this->_sth, $result, 0, -1,
- self::$fetchStyleMap[$fetchStyle] | OCI_RETURN_NULLS | OCI_FETCHSTATEMENT_BY_ROW | OCI_RETURN_LOBS);
-=======
-
- $fetchStructure = OCI_FETCHSTATEMENT_BY_ROW;
- if ($fetchStyle == PDO::FETCH_COLUMN) {
- $fetchStructure = OCI_FETCHSTATEMENT_BY_COLUMN;
- }
+ $fetchStructure = OCI_FETCHSTATEMENT_BY_ROW;
+ if ($fetchStyle == PDO::FETCH_COLUMN) {
+ $fetchStructure = OCI_FETCHSTATEMENT_BY_COLUMN;
+ }
- $result = array();
- oci_fetch_all($this->_sth, $result, 0, -1,
- self::$fetchStyleMap[$fetchStyle] | OCI_RETURN_NULLS | $fetchStructure | OCI_RETURN_LOBS);
+ oci_fetch_all($this->_sth, $result, 0, -1,
+ self::$fetchStyleMap[$fetchStyle] | OCI_RETURN_NULLS | $fetchStructure | OCI_RETURN_LOBS);
- if ($fetchStyle == PDO::FETCH_COLUMN) {
- $result = $result[0];
->>>>>>> Adding PDO::FETCH_COLUMN support to OCI8 fetchAll method using the fetch structure OCI_FETCHSTATEMENT_BY_COLUMN.
+ if ($fetchStyle == PDO::FETCH_COLUMN) {
+ $result = $result[0];
+ }
}
return $result;
diff --git a/tests/Doctrine/Tests/DBAL/Functional/DataAccessTest.php b/tests/Doctrine/Tests/DBAL/Functional/DataAccessTest.php
index <HASH>..<HASH> 100644
--- a/tests/Doctrine/Tests/DBAL/Functional/DataAccessTest.php
+++ b/tests/Doctrine/Tests/DBAL/Functional/DataAccessTest.php
@@ -434,6 +434,23 @@ class DataAccessTest extends \Doctrine\Tests\DbalFunctionalTestCase
$this->assertEquals('foo', $results[0]->test_string);
$this->assertStringStartsWith('2010-01-01 10:10:10', $results[0]->test_datetime);
}
+
+ /**
+ * @group DBAL-241
+ */
+ public function testFetchAllStyleColumn()
+ {
+ $sql = "DELETE FROM fetch_table";
+ $this->_conn->executeUpdate($sql);
+
+ $this->_conn->insert('fetch_table', array('test_int' => 1, 'test_string' => 'foo'));
+ $this->_conn->insert('fetch_table', array('test_int' => 10, 'test_string' => 'foo'));
+
+ $sql = "SELECT test_int FROM fetch_table";
+ $rows = $this->_conn->query($sql)->fetchAll(\PDO::FETCH_COLUMN);
+
+ $this->assertEquals(array(1, 10), $rows);
+ }
}
class MyFetchClass
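
PDO::FETCH_COLUMN flattens a result set down to its first column, which is what OCI_FETCHSTATEMENT_BY_COLUMN delivers and what the new test asserts. The semantics in a few lines of Python:

rows = [(1, "foo"), (10, "foo")]  # (test_int, test_string)

# FETCH_COLUMN: keep only the first column of every row.
column = [row[0] for row in rows]
assert column == [1, 10]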
|
[DBAL-<I>] Added platform independent test for Statement#fetchAll(PDO::FETCH_COLUMN)
|
doctrine_dbal
|
train
|
4be4b718ed55b3624c39093ff84cd1a66bb0044c
|
diff --git a/spec/schedule_at_spec.rb b/spec/schedule_at_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/schedule_at_spec.rb
+++ b/spec/schedule_at_spec.rb
@@ -92,15 +92,21 @@ describe Rufus::Scheduler do
it 'accepts a time string' do
job = @scheduler.at('2100-12-12 20:30', :job => true) {}
+ jt = job.time
- expect(job.time).to eq(Time.parse('2100-12-12 20:30'))
+ expect(jt.zone).to eq(Rufus::Scheduler::ZoTime.local_tzone)
+ expect(jt.strftime('%Y-%m-%d %H:%M:%S')).to eq('2100-12-12 20:30:00')
end
it 'accepts a time string with a delta timezone' do
job = @scheduler.at('2100-12-12 20:30 -0200', :job => true) {}
- expect(job.time).to eq(Time.parse('2100-12-12 20:30 -0200'))
+ expect(
+ job.time.strftime('%Y-%m-%d %H:%M:%S %:z')
+ ).to eq(
+ '2100-12-12 20:30:00 -02:00'
+ )
end
it 'accepts a time string with a named timezone' do
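
The spec now compares rendered components instead of Time objects, so the assertion no longer depends on which time class the scheduler returns. The same delta-timezone parse sketched in Python; note that %z prints -0200 where Ruby's %:z prints -02:00:

from datetime import datetime

t = datetime.strptime("2100-12-12 20:30 -0200", "%Y-%m-%d %H:%M %z")
# Compare the rendered string, not the datetime object itself.
assert t.strftime("%Y-%m-%d %H:%M:%S %z") == "2100-12-12 20:30:00 -0200"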
|
Adapt at spec to gh-<I>
[ci skip]
|
jmettraux_rufus-scheduler
|
train
|
633ff127e41ea83764e3aed1db26d5151201d9e3
|
diff --git a/tests/PHPUnit/Fixtures/InvalidVisits.php b/tests/PHPUnit/Fixtures/InvalidVisits.php
index <HASH>..<HASH> 100644
--- a/tests/PHPUnit/Fixtures/InvalidVisits.php
+++ b/tests/PHPUnit/Fixtures/InvalidVisits.php
@@ -67,7 +67,7 @@ class Test_Piwik_Fixture_InvalidVisits extends Test_Piwik_BaseFixture
// test with excluded User Agent
$t->setUserAgent('Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.2.6) Gecko/20100625 Firefox/3.6.6 (.NET CLR 3.5.30729) (excludeduseragentstring)');
$t->setIp('211.1.2.3');
- //self::checkResponse($t->doTrackPageView('visit from excluded User Agent'));
+ self::checkResponse($t->doTrackPageView('visit from excluded User Agent'));
// test w/ global excluded User Agent
$t->setUserAgent('Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.2.6) Gecko/20100625 Firefox/3.6.6 (.NET CLR 3.5.30729) (globalexcludeduseragent)');
|
Investigating travis build failure... (<I>th commit)
|
matomo-org_matomo
|
train
|
d2f1ebbac24c4f22adc942c21ae42952c9cd6de2
|
diff --git a/msspeak/voices.py b/msspeak/voices.py
index <HASH>..<HASH> 100644
--- a/msspeak/voices.py
+++ b/msspeak/voices.py
@@ -118,6 +118,12 @@ AZURE_VOICES = [
"Locale": "en-GB"
},
{
+ "Name": "Microsoft Server Speech Text to Speech Voice (en-GB, LibbyNeural)",
+ "ShortName": "en-GB-LibbyNeural",
+ "Gender": "Female",
+ "Locale": "en-GB"
+ },
+ {
"Name": "Microsoft Server Speech Text to Speech Voice (en-IE, Sean)",
"ShortName": "en-IE-Sean",
"Gender": "Male",
@@ -539,6 +545,7 @@ AZURE_VOICES = [
}
]
+
def find_voice(locale, gender, neural=True):
"""
method to find voice
@@ -561,4 +568,4 @@ def find_voice(locale, gender, neural=True):
# voice = find_voice(locale='en-US', gender='Female', neural=True)
-# print('found voice:', voice)
\ No newline at end of file
+# print('found voice:', voice)
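
find_voice itself is not shown in this hunk. A plausible sketch of the lookup it performs over AZURE_VOICES; the Neural filter via the ShortName suffix is an assumption, since the real matching logic is elided:

def find_voice(voices, locale, gender, neural=True):
    # Hypothetical reconstruction: match locale and gender, and use the
    # "Neural" marker in ShortName to separate neural from standard voices.
    for voice in voices:
        if (voice["Locale"] == locale
                and voice["Gender"] == gender
                and ("Neural" in voice["ShortName"]) == neural):
            return voice
    return None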
|
[fix](voices) add en-GB Neural voice
|
newfies-dialer_python-msspeak
|
train
|
3886e7e015f00cdadd5223ee9bceb4f2bab5e7cd
|
diff --git a/lib/chartkick/remote/helper.rb b/lib/chartkick/remote/helper.rb
index <HASH>..<HASH> 100644
--- a/lib/chartkick/remote/helper.rb
+++ b/lib/chartkick/remote/helper.rb
@@ -33,7 +33,7 @@ module Chartkick::Remote
if chart_id # json request
controller.chartkick_remote_blocks ||= {}
controller.chartkick_remote_blocks[@remote_chart_id] = block
- skip = standalone && chart_id.to_s == @remote_chart_id.to_s
+ skip = standalone && chart_id.to_s != @remote_chart_id.to_s
else
data_source = url_for(params.merge(_chartkick_remote_chart_id: @remote_chart_id, format: :json))
end
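
The one-character fix inverts the skip condition: in standalone mode every block except the requested chart is now skipped, instead of the requested chart itself. The corrected predicate, isolated as a sketch:

def should_skip(standalone, chart_id, remote_chart_id):
    # Skip other charts' blocks; keep the one whose id was requested.
    return standalone and str(chart_id) != str(remote_chart_id)

assert should_skip(True, 1, 1) is False  # render the requested chart
assert should_skip(True, 1, 2) is True   # skip everything else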
|
fix standalone to include, not exclude, the desired chart
|
ashanbrown_chartkick-remote
|
train
|