hash
stringlengths
40
40
diff
stringlengths
131
114k
message
stringlengths
7
980
project
stringlengths
5
67
split
stringclasses
1 value
38abe556821e7fbb4cd5ed16ba7bf433bb8c31b4
diff --git a/bedup/tracking.py b/bedup/tracking.py index <HASH>..<HASH> 100644 --- a/bedup/tracking.py +++ b/bedup/tracking.py @@ -192,12 +192,15 @@ def track_updated_files(sess, vol, tt): top_generation = get_root_generation(vol.fd) if (vol.last_tracked_size_cutoff is not None and vol.last_tracked_size_cutoff <= vol.size_cutoff): - min_generation = vol.last_tracked_generation + min_generation = vol.last_tracked_generation + 1 else: min_generation = 0 tt.notify( 'Scanning volume %r generations from %d to %d, with size cutoff %d' % (vol.desc, min_generation, top_generation, vol.size_cutoff)) + if min_generation > top_generation: + tt.notify('Generation didn\'t change, skipping scan') + return tt.format( '{elapsed} Updated {desc:counter} items: ' '{path:truncate-left} {desc}') @@ -258,7 +261,7 @@ def track_updated_files(sess, vol, tt): if inode_gen <= vol.last_tracked_generation: continue else: - if inode_gen <= min_generation: + if inode_gen < min_generation: continue if not stat.S_ISREG(mode): continue
Skip scan if the top generation didn't change. Also, make min_generation inclusive, to match the search ioctl's semantics.
g2p_bedup
train
c6694a786ec781424550adc171e2e485ff3b43ac
diff --git a/plugin/geomajas-layer-geotools/src/main/java/org/geomajas/layer/shapeinmem/ShapeInMemLayer.java b/plugin/geomajas-layer-geotools/src/main/java/org/geomajas/layer/shapeinmem/ShapeInMemLayer.java index <HASH>..<HASH> 100644 --- a/plugin/geomajas-layer-geotools/src/main/java/org/geomajas/layer/shapeinmem/ShapeInMemLayer.java +++ b/plugin/geomajas-layer-geotools/src/main/java/org/geomajas/layer/shapeinmem/ShapeInMemLayer.java @@ -42,7 +42,6 @@ import org.geomajas.layer.VectorLayer; import org.geomajas.layer.feature.FeatureModel; import org.geomajas.service.FilterService; import org.geomajas.service.GeoService; -import org.geotools.data.DataStore; import org.geotools.data.shapefile.ShapefileDataStore; import org.geotools.feature.FeatureCollection; import org.geotools.feature.FeatureIterator; @@ -81,6 +80,8 @@ public class ShapeInMemLayer extends FeatureSourceRetriever implements VectorLay private CoordinateReferenceSystem crs; + private URL url; + public CoordinateReferenceSystem getCrs() { return crs; } @@ -99,7 +100,6 @@ public class ShapeInMemLayer extends FeatureSourceRetriever implements VectorLay public void setLayerInfo(VectorLayerInfo layerInfo) throws LayerException { this.layerInfo = layerInfo; - setFeatureSourceName(layerInfo.getFeatureInfo().getDataSourceName()); initCrs(); initFeatures(); } @@ -121,20 +121,8 @@ public class ShapeInMemLayer extends FeatureSourceRetriever implements VectorLay } public void setUrl(URL url) throws LayerException { - try { - InputStream in = url.openStream(); - if (in == null) { - throw new IOException("File not found: " + url); - } - in.close(); - setDataStore(new ShapefileDataStore(url)); - } catch (MalformedURLException e) { - throw new LayerException(ExceptionCode.INVALID_SHAPE_FILE_URL, url); - } catch (IOException ioe) { - throw new LayerException(ExceptionCode.CANNOT_CREATE_LAYER_MODEL, ioe, url); - } catch (GeomajasException ge) { - throw new LayerException(ExceptionCode.CANNOT_CREATE_LAYER_MODEL, ge, url); 
- } + this.url = url; + initFeatures(); } public void setUrl(String url) throws LayerException { @@ -156,12 +144,6 @@ public class ShapeInMemLayer extends FeatureSourceRetriever implements VectorLay } } - @Override - protected void setDataStore(DataStore dataStore) throws LayerException { - super.setDataStore(dataStore); - initFeatures(); - } - public Iterator<?> getElements(Filter queryFilter) throws LayerException { Filter filter = convertFilter(queryFilter); List<SimpleFeature> filteredList = new ArrayList<SimpleFeature>(); @@ -242,10 +224,18 @@ public class ShapeInMemLayer extends FeatureSourceRetriever implements VectorLay // Private functions: private void initFeatures() throws LayerException { - if (null == layerInfo || null == getDataStore()) { + if (null == layerInfo || null == url) { return; } try { + InputStream in = url.openStream(); + if (in == null) { + throw new IOException("File not found: " + url); + } + in.close(); + setDataStore(new ShapefileDataStore(url)); + setFeatureSourceName(layerInfo.getFeatureInfo().getDataSourceName()); + featureModel = new ShapeInMemFeatureModel(getDataStore(), layerInfo.getFeatureInfo().getDataSourceName(), geoService.getSridFromCrs(layerInfo.getCrs())); FeatureCollection<SimpleFeatureType, SimpleFeature> col = getFeatureSource().getFeatures(); @@ -262,10 +252,15 @@ public class ShapeInMemLayer extends FeatureSourceRetriever implements VectorLay col.close(iterator); // getNextId(); nextId++; + } catch (NumberFormatException nfe) { throw new LayerException(ExceptionCode.FEATURE_MODEL_PROBLEM, nfe); + } catch (MalformedURLException e) { + throw new LayerException(ExceptionCode.INVALID_SHAPE_FILE_URL, url); } catch (IOException ioe) { - throw new LayerException(ExceptionCode.FEATURE_MODEL_PROBLEM, ioe); + throw new LayerException(ExceptionCode.CANNOT_CREATE_LAYER_MODEL, ioe, url); + } catch (GeomajasException ge) { + throw new LayerException(ExceptionCode.CANNOT_CREATE_LAYER_MODEL, ge, url); } }
MAJ-<I> assure setters can be called in any order
geomajas_geomajas-project-server
train
1cf39a58858eceb99f8c28c2e7f93de5b1457fcb
diff --git a/packages/nitro-webpack/readme.md b/packages/nitro-webpack/readme.md index <HASH>..<HASH> 100644 --- a/packages/nitro-webpack/readme.md +++ b/packages/nitro-webpack/readme.md @@ -78,6 +78,13 @@ No loader rule is enabled by default. Activate following prepared rules you need Enable some additional features +#### `options.features.bundleAnalyzer` + +* Type: boolean +* default: false + +`true` will add the bundleAnalyser plugin and opens a browser window with the stats + #### `options.features.gitInfo` * Type: boolean diff --git a/packages/nitro-webpack/webpack-config/webpack.config.dev.js b/packages/nitro-webpack/webpack-config/webpack.config.dev.js index <HASH>..<HASH> 100644 --- a/packages/nitro-webpack/webpack-config/webpack.config.dev.js +++ b/packages/nitro-webpack/webpack-config/webpack.config.dev.js @@ -8,6 +8,7 @@ const MiniCssExtractPlugin = require('mini-css-extract-plugin'); const StyleLintPlugin = require('stylelint-webpack-plugin'); const CaseSensitivePathsPlugin = require('case-sensitive-paths-webpack-plugin'); const ForkTsCheckerWebpackPlugin = require('fork-ts-checker-webpack-plugin'); +const BundleAnalyzerPlugin = require('webpack-bundle-analyzer').BundleAnalyzerPlugin; const hotMiddlewareScript = 'webpack-hot-middleware/client?path=/__webpack_hmr&timeout=20000&reload=true'; const appDirectory = fs.realpathSync(process.cwd()); @@ -268,6 +269,10 @@ module.exports = (options = { rules: {}, features: {} }) => { ); } + if (options.features.bundleAnalyzer) { + webpackConfig.plugins.push(new BundleAnalyzerPlugin()); + } + return webpackConfig; }; diff --git a/packages/nitro-webpack/webpack-config/webpack.config.prod.js b/packages/nitro-webpack/webpack-config/webpack.config.prod.js index <HASH>..<HASH> 100644 --- a/packages/nitro-webpack/webpack-config/webpack.config.prod.js +++ b/packages/nitro-webpack/webpack-config/webpack.config.prod.js @@ -8,7 +8,7 @@ const MiniCssExtractPlugin = require('mini-css-extract-plugin'); const 
OptimizeCSSAssetsPlugin = require('optimize-css-assets-webpack-plugin'); const CaseSensitivePathsPlugin = require('case-sensitive-paths-webpack-plugin'); const ForkTsCheckerWebpackPlugin = require('fork-ts-checker-webpack-plugin'); -// const BundleAnalyzerPlugin = require('webpack-bundle-analyzer').BundleAnalyzerPlugin; +const BundleAnalyzerPlugin = require('webpack-bundle-analyzer').BundleAnalyzerPlugin; const appDirectory = fs.realpathSync(process.cwd()); @@ -57,7 +57,6 @@ module.exports = (options = { rules: {}, features: {} }) => { plugins: [ new webpack.BannerPlugin({ banner }), new CaseSensitivePathsPlugin({ debug: false }), - // new BundleAnalyzerPlugin(), ], optimization: { // minimizer: [ @@ -310,6 +309,11 @@ module.exports = (options = { rules: {}, features: {} }) => { ); } + // feature bundle analyzer + if (options.features.bundleAnalyzer) { + webpackConfig.plugins.push(new BundleAnalyzerPlugin()); + } + return webpackConfig; }; diff --git a/packages/project-nitro-twig/config/webpack/options.js b/packages/project-nitro-twig/config/webpack/options.js index <HASH>..<HASH> 100644 --- a/packages/project-nitro-twig/config/webpack/options.js +++ b/packages/project-nitro-twig/config/webpack/options.js @@ -8,6 +8,7 @@ const options = { image: true, }, features: { + bundleAnalyzer: false, gitInfo: false, }, }; diff --git a/packages/project-nitro/config/webpack/options.js b/packages/project-nitro/config/webpack/options.js index <HASH>..<HASH> 100644 --- a/packages/project-nitro/config/webpack/options.js +++ b/packages/project-nitro/config/webpack/options.js @@ -8,6 +8,7 @@ const options = { image: true, }, features: { + bundleAnalyzer: false, gitInfo: false, }, };
feature(webpack): add bundle analyser plugin
namics_generator-nitro
train
551ec7b3c97a074bf1a3297b3ebcdef1df96571b
diff --git a/tools/buildbsp.py b/tools/buildbsp.py index <HASH>..<HASH> 100755 --- a/tools/buildbsp.py +++ b/tools/buildbsp.py @@ -28,6 +28,7 @@ games = { 'tf2': {'id': 440}, 'css': {'id': 240}, 'hl2': {'id': 220}, + 'hl2mp': {'id': 320}, 'gm': {'id': 4000} } @@ -63,6 +64,10 @@ if __name__ == '__main__': if sys.platform.startswith('win32'): # Define constants games['tf2']['gamedir'] = os.path.join("team fortress 2", "tf") + games['css']['gamedir'] = os.path.join("counter-strike source", "cstrike") + games['hl2']['gamedir'] = os.path.join("half-life 2", "hl2") + games['hl2mp']['gamedir'] = os.path.join("half-life 2 deathmatch", "hl2mp") + games['gm']['gamedir'] = os.path.join("garrysmod", "garrysmod") # Environmental scan # - Figure out paths we'll need (maybe detect where steam lives?)
Add more game support to build tool (#6)
BHSPitMonkey_vmflib
train
ef92b9d931c0be56e3b73797737c99366b9bf3ba
diff --git a/pagseguro/api.py b/pagseguro/api.py index <HASH>..<HASH> 100644 --- a/pagseguro/api.py +++ b/pagseguro/api.py @@ -1,4 +1,4 @@ -#-*- coding: utf-8 -*- +# -*- coding: utf-8 -*- from __future__ import unicode_literals import requests import xmltodict diff --git a/pagseguro/settings.py b/pagseguro/settings.py index <HASH>..<HASH> 100644 --- a/pagseguro/settings.py +++ b/pagseguro/settings.py @@ -1,4 +1,4 @@ -#-*- coding: utf-8 -*- +# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.conf import settings diff --git a/pagseguro/signals.py b/pagseguro/signals.py index <HASH>..<HASH> 100644 --- a/pagseguro/signals.py +++ b/pagseguro/signals.py @@ -1,4 +1,4 @@ -#-*- coding: utf-8 -*- +# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.dispatch import Signal
Convenção PEP8 (E<I>): block comment should start with ‘# ‘
allisson_django-pagseguro2
train
06cda51629e2ffb1fcf9d51516bd05bba8cfda8a
diff --git a/Controller/AzineEmailTemplateController.php b/Controller/AzineEmailTemplateController.php index <HASH>..<HASH> 100644 --- a/Controller/AzineEmailTemplateController.php +++ b/Controller/AzineEmailTemplateController.php @@ -46,6 +46,7 @@ class AzineEmailTemplateController extends ContainerAware /** * Show a web-preview-version of an email-template, filled with dummy-content * @param string $format + * @return Response */ public function webPreViewAction($template, $format = null) { @@ -85,6 +86,7 @@ class AzineEmailTemplateController extends ContainerAware // add campaign tracking params $campaignParams = $this->getTemplateProviderService()->getCampaignParamsFor($template, $emailVars); + $campaignParams['utm_medium'] = 'webPreview'; if(sizeof($campaignParams) > 0) { $htmlBody = $response->getContent(); $htmlBody = $this->container->get("azine.email.bundle.twig.filters")->addCampaignParamsToAllUrls($htmlBody, $campaignParams);
use different campaign-param for webPreView to not mess up the statistics
azine_email-bundle
train
2f525659b7b4092b9b0213a1dcbd1a44618c5400
diff --git a/app/controllers/application_controller.rb b/app/controllers/application_controller.rb index <HASH>..<HASH> 100644 --- a/app/controllers/application_controller.rb +++ b/app/controllers/application_controller.rb @@ -327,13 +327,15 @@ class ApplicationController < ActionController::Base elsif notice.kind_of? RuntimeError items["notices"].push(notice.message) else - Rails.logger.error("Recieved unrecognized notice: " + notice.inspect) + Rails.logger.error("Received unrecognized notice: " + notice.inspect) items["notices"].push(notice) end end def requested_action - controller_name + '___' + action_name + unless controller_name.nil? or action_name.nil? + controller_name + '___' + action_name + end end def setup_environment_selector org, accessible
notices - change to fix broken tests
Katello_katello
train
6616399c2562c7530e6c16c0f6630bdcb3369464
diff --git a/index.js b/index.js index <HASH>..<HASH> 100755 --- a/index.js +++ b/index.js @@ -197,7 +197,7 @@ FSWatcher.prototype._remove = function(directory, item) { if (!this._throttle('remove', fullPath, 5)) return; // if the only watched file is removed, watch for its return - if (!isDirectory && Object.keys(this.watched).length === 1) { + if (!isDirectory && !this.options.useFsEvents && Object.keys(this.watched).length === 1) { this.add(directory, item); }
Exclude fsevents from rm watched file workaround
paulmillr_chokidar
train
8bfba2087b6916d05d416c0cf4ad5d7b64809ba6
diff --git a/src/behaviours/form.js b/src/behaviours/form.js index <HASH>..<HASH> 100644 --- a/src/behaviours/form.js +++ b/src/behaviours/form.js @@ -37,10 +37,6 @@ const internalMapStateToProps = (state, formKey) => { return resultingProps; }; -function buildActionForTheDispatch (actionForTheDispatch, dispatch) { - return actionForTheDispatch.map(element => dispatch => bindActionCreators) -} - const internalMapDispatchToProps = (dispatch, loadAction, saveAction, formKey, nonValidatedFields,entityPathArray ) => { const resultingActions = {}; if (loadAction) resultingActions.load = (...loadArgs) => dispatch(loadAction( ...loadArgs)); diff --git a/src/components/field.js b/src/components/field.js index <HASH>..<HASH> 100644 --- a/src/components/field.js +++ b/src/components/field.js @@ -21,7 +21,6 @@ class Field extends PureComponent { render(){ const {textOnly, multiple, list, fieldForLine, ...otherProps} = this.props; otherProps.value = otherProps.rawInputValue; //https://github.com/get-focus/focus-redux/issues/39 compatibility with focus components - console.log(otherProps.value) const {TextComponent = DefaultTextComponent, DisplayComponent = DefaultDisplayComponent, InputComponent = DefaultInputComponent, SelectComponent = DefaultSelectComponent,SelectComponentDisplay = DefaultSelectDisplayComponent, ListComponent = DefaultListComponent} = otherProps.metadata; const renderConsult = () => list ? <ListComponent fieldForLine={fieldForLine} values={otherProps.formattedInputValue} {...otherProps}/> : (multiple ? <SelectComponentDisplay {...otherProps} /> : <DisplayComponent {...otherProps} />); const renderEdit = () => list ? <ListComponent fieldForLine={fieldForLine} values={otherProps.formattedInputValue} {...otherProps}/> : (multiple ? <SmartSelectComponent SelectComponent={SelectComponent} {...otherProps}/> : <InputComponent {...otherProps}/>);
[FormBehaviours] possible to give a object for dispatch function
get-focus_deprecated-focus-graph
train
f13e6ca631f087819e9a1d7cc6b2c9ceaae9e39d
diff --git a/lib/model/model.go b/lib/model/model.go index <HASH>..<HASH> 100644 --- a/lib/model/model.go +++ b/lib/model/model.go @@ -1228,11 +1228,10 @@ func (m *model) ClusterConfig(deviceID protocol.DeviceID, cm protocol.ClusterCon } } - changedHere, tempIndexFolders, paused, err := m.ccHandleFolders(cm.Folders, deviceCfg, ccDeviceInfos, indexSenderRegistry) + tempIndexFolders, paused, err := m.ccHandleFolders(cm.Folders, deviceCfg, ccDeviceInfos, indexSenderRegistry) if err != nil { return err } - changed = changed || changedHere m.pmut.Lock() m.remotePausedFolders[deviceID] = paused @@ -1276,8 +1275,7 @@ func (m *model) ClusterConfig(deviceID protocol.DeviceID, cm protocol.ClusterCon return nil } -func (m *model) ccHandleFolders(folders []protocol.Folder, deviceCfg config.DeviceConfiguration, ccDeviceInfos map[string]*indexSenderStartInfo, indexSenders *indexSenderRegistry) (bool, []string, map[string]struct{}, error) { - var changed bool +func (m *model) ccHandleFolders(folders []protocol.Folder, deviceCfg config.DeviceConfiguration, ccDeviceInfos map[string]*indexSenderStartInfo, indexSenders *indexSenderRegistry) ([]string, map[string]struct{}, error) { var folderDevice config.FolderDeviceConfiguration tempIndexFolders := make([]string, 0, len(folders)) paused := make(map[string]struct{}, len(folders)) @@ -1335,7 +1333,7 @@ func (m *model) ccHandleFolders(folders []protocol.Folder, deviceCfg config.Devi l.Warnln(msg) } - return changed, tempIndexFolders, paused, err + return tempIndexFolders, paused, err } if devErrs, ok := m.folderEncryptionFailures[folder.ID]; ok { if len(devErrs) == 1 { @@ -1375,7 +1373,7 @@ func (m *model) ccHandleFolders(folders []protocol.Folder, deviceCfg config.Devi indexSenders.removeAllExcept(seenFolders) - return changed, tempIndexFolders, paused, nil + return tempIndexFolders, paused, nil } func (m *model) ccCheckEncryption(fcfg config.FolderConfiguration, folderDevice config.FolderDeviceConfiguration, ccDeviceInfos 
*indexSenderStartInfo, deviceUntrusted bool) error {
lib/model: Remove obsolete return val from ccHandleFolders (ref #<I>) (#<I>)
syncthing_syncthing
train
9040f2d5b7750cb842587afe79ebf20cdfd9763a
diff --git a/credstash.py b/credstash.py index <HASH>..<HASH> 100755 --- a/credstash.py +++ b/credstash.py @@ -251,12 +251,13 @@ def clean_fail(func): return func_wrapper -def listSecrets(region=None, table="credential-store", **kwargs): +def listSecrets(region=None, table="credential-store", session=None, **kwargs): ''' do a full-table scan of the credential-store, and return the names and versions of every credential ''' - session = get_session(**kwargs) + if session is None: + session = get_session(**kwargs) dynamodb = session.resource('dynamodb', region_name=region) secrets = dynamodb.Table(table) @@ -344,7 +345,7 @@ def getAllSecrets(version="", region=None, table="credential-store", session = get_session(**kwargs) dynamodb = session.resource('dynamodb', region_name=region) kms = session.client('kms', region_name=region) - secrets = listSecrets(region, table, **kwargs) + secrets = listSecrets(region, table, session, **kwargs) # Only return the secrets that match the pattern in `credential` # This already works out of the box with the CLI get action,
allow use of a custom session with AWS profile information when using the getAllSecrets, and by extenstion the listSecrets
fugue_credstash
train
eb4ac7509cad6b68f22b53e986d4df3e98b775c0
diff --git a/lib/xml_models/identity_metadata/identity_metadata.rb b/lib/xml_models/identity_metadata/identity_metadata.rb index <HASH>..<HASH> 100644 --- a/lib/xml_models/identity_metadata/identity_metadata.rb +++ b/lib/xml_models/identity_metadata/identity_metadata.rb @@ -152,6 +152,12 @@ class IdentityMetadata def get_tags() self.tags.collect { |t| t.value } end + + # Convenience method to return the first agreementId. There's usually only one. + # Returns nil if there are no agreementIds. + def agreementId + @agreementIds.first + end # Return the OtherId hash for the specified identier name
Added convenience method to return the first (and usually only) agreement id
sul-dlss_lyber-core
train
19bda8af3db4c750a6a2df232546343c673fb305
diff --git a/app/js/controllers.js b/app/js/controllers.js index <HASH>..<HASH> 100644 --- a/app/js/controllers.js +++ b/app/js/controllers.js @@ -7,4 +7,4 @@ MyCtrl1.$inject = []; function MyCtrl2() { } -MyCtrl1.$inject = []; +MyCtrl2.$inject = [];
fix typo in controllers.js placeholder code
angular_angular-seed
train
7a4e9e69a6cfafbb260215f0029cbd9c9d93b1c0
diff --git a/lib/core/slider.spec.js b/lib/core/slider.spec.js index <HASH>..<HASH> 100644 --- a/lib/core/slider.spec.js +++ b/lib/core/slider.spec.js @@ -20,6 +20,7 @@ var slider = require('./slider'); var Layout = require('../enums/layout'); var Phase = require('../enums/phase'); +var Theme = require('../enums/theme'); describe('slider', function() { afterEach(function() { @@ -72,6 +73,7 @@ describe('slider', function() { sliderElement = createSliderElement(3); sliderElement.classList.add('hermes-transition--test'); sliderElement.childNodes[0].id = 'first'; + sliderElement.childNodes[0].classList.add(Theme.BLACK); sliderElement.childNodes[1].id = 'second'; sliderElement.childNodes[2].id = 'third'; sliderElementSerialized = JSON.stringify(sliderElement, domSerializeHelper); @@ -151,6 +153,9 @@ describe('slider', function() { expect(testedSlider.currentSlide).toBe(testedSlider.slides[0]); }); + it('then contains a theme class name from first slide', function() { + expect(sliderElement.classList.contains(Theme.BLACK)).toBe(true); + }); it('then contains "hermes-slide-id-first" class', function() { expect(sliderElement.classList.contains('hermes-slide-id-first')).toBe(true); }); @@ -213,6 +218,9 @@ describe('slider', function() { it('then does not contain "hermes-slide-id-first" class', function() { expect(sliderElement.classList.contains('hermes-slide-id-first')).toBe(false); }); + it('then contains a theme class name from second slide', function() { + expect(sliderElement.classList.contains(Theme.WHITE)).toBe(true); + }); it('then contains "hermes-slide-id-second" class', function() { expect(sliderElement.classList.contains('hermes-slide-id-second')).toBe(true); });
+ tested adding theme class name of current slide to the slider
mchalapuk_hyper-text-slider
train
834647cf2987d50987093d2e7528b187544f21cf
diff --git a/napalm/base.py b/napalm/base.py index <HASH>..<HASH> 100644 --- a/napalm/base.py +++ b/napalm/base.py @@ -247,18 +247,20 @@ class NetworkDriver: def get_environment(self): """ Returns a dictionary where: - * fans is a list of dictionaries that describe the state of the fans: - * location (string) - location of the fan + * fans is a dictionary of dictionaries where the key is the location and the values: * status (boolean) - True if it's ok, false if it's broken - * temperature is a list of dictionaries that describe the state of the temperature sensors: - * location (string) - location of the fan + * temperature is a dictionary of dictionaries where the key is the location and the values: * temperature (int) - Temperature in celsius the sensor is reporting. * is_alert (boolean) - True if the temperature is above the alert threshold * is_critical (boolean) - True if the temperature is above the critical threshold - * power is a lit of dictionaries that describe the state of the power supplies: + * power is a dictionary of dictionaries where the key is the PSU id and the values: * status (boolean) - True if it's ok, false if it's broken * capacity (int) - Capacity in W that the power supply can support * output (int) - Watts drawn by the system + * cpu is a dictionary of dictionaries where the key is the ID and the values + * %usage + * available_ram (int) - Total amount of RAM installed in the device + * used_ram (int) - RAM that is still free in the device """ raise NotImplementedError
Added CPU and RAM on get_environment
napalm-automation_napalm-base
train
3b979139dfb2ba89c193ddb92d37e08e2b85299c
diff --git a/test/Unit/Format/SnifferTest.php b/test/Unit/Format/SnifferTest.php index <HASH>..<HASH> 100644 --- a/test/Unit/Format/SnifferTest.php +++ b/test/Unit/Format/SnifferTest.php @@ -135,11 +135,13 @@ final class SnifferTest extends Framework\TestCase } /** - * @dataProvider providerIndent + * @dataProvider providerPureIndentAndSniffedIndent + * @dataProvider providerMixedIndentAndSniffedIndent * * @param string $indent + * @param string $sniffedIndent */ - public function testSniffReturnsFormatWithIndentSniffedFromArray(string $indent): void + public function testSniffReturnsFormatWithIndentSniffedFromArray(string $indent, string $sniffedIndent): void { $json = <<<JSON [ @@ -156,15 +158,17 @@ JSON; $format = $sniffer->sniff($json); $this->assertInstanceOf(FormatInterface::class, $format); - $this->assertSame($indent, $format->indent()); + $this->assertSame($sniffedIndent, $format->indent()); } /** - * @dataProvider providerIndent + * @dataProvider providerPureIndentAndSniffedIndent + * @dataProvider providerMixedIndentAndSniffedIndent * * @param string $indent + * @param string $sniffedIndent */ - public function testSniffReturnsFormatWithIndentSniffedFromObject(string $indent): void + public function testSniffReturnsFormatWithIndentSniffedFromObject(string $indent, string $sniffedIndent): void { $json = <<<JSON { @@ -181,29 +185,60 @@ JSON; $format = $sniffer->sniff($json); $this->assertInstanceOf(FormatInterface::class, $format); - $this->assertSame($indent, $format->indent()); + $this->assertSame($sniffedIndent, $format->indent()); } - public function providerIndent(): \Generator + public function providerPureIndentAndSniffedIndent(): \Generator { $characters = [ - ' ', - "\t", + 'space' => ' ', + 'tab' => "\t", ]; - $counts = [1, 3]; + $sizes = [1, 3]; - foreach ($characters as $character) { - foreach ($counts as $count) { - $indent = \str_repeat($character, $count); + foreach ($characters as $style => $character) { + foreach ($sizes as $size) { + $key 
= \sprintf( + '%s-%d', + $style, + $size + ); - yield [ - $indent, + $pureIndent = \str_repeat( + $character, + $size + ); + + yield $key => [ + $pureIndent, + $pureIndent, ]; } } } + public function providerMixedIndentAndSniffedIndent(): \Generator + { + $mixedIndents = [ + 'space-and-tab' => [ + " \t", + ' ', + ], + 'tab-and-space' => [ + "\t ", + "\t", + ], + ]; + + foreach ($mixedIndents as $key => [$mixedIndent, $sniffedIndent]) { + yield $key => [ + $mixedIndent, + $sniffedIndent, + ]; + } + } + /** * @dataProvider providerJsonWithoutIndent *
Enhancement: Assert that sniffer sniffs only pure indents
localheinz_json-normalizer
train
17165dbd9462350f72c2878b20872062f6cd737d
diff --git a/lib/metaforce/services/client.rb b/lib/metaforce/services/client.rb index <HASH>..<HASH> 100644 --- a/lib/metaforce/services/client.rb +++ b/lib/metaforce/services/client.rb @@ -27,8 +27,10 @@ module Metaforce password = "#{password}#{security_token}" unless security_token.nil? client = Savon::Client.new File.expand_path("../../../../wsdl/#{Metaforce.configuration.api_version}/partner.xml", __FILE__) do |wsdl| wsdl.endpoint = wsdl.endpoint.to_s.sub(/login/, 'test') if Metaforce.configuration.test + Metaforce.log("Logging in via #{wsdl.endpoint.to_s}") end client.http.auth.ssl.verify_mode = :none + response = client.request(:login) do soap.body = { :username => username,
Log the endpoint used to login.
ejholmes_metaforce
train
ee66ec304f01696e75023dcd592674a071441d57
diff --git a/lib/solargraph/code_map.rb b/lib/solargraph/code_map.rb index <HASH>..<HASH> 100755 --- a/lib/solargraph/code_map.rb +++ b/lib/solargraph/code_map.rb @@ -102,11 +102,17 @@ module Solargraph # # @return [Boolean] def comment_at?(index) + return false if string_at?(index) line, col = Solargraph::ApiMap::Source.get_position_at(source.code, index) return false if source.stubbed_lines.include?(line) @comments.each do |c| return true if index > c.location.expression.begin_pos and index <= c.location.expression.end_pos end + # Extra test due to some comments not getting tracked + while (index > 0 and @code[index] != "\n") + return true if @code[index] == '#' + index -= 1 + end false end @@ -540,6 +546,7 @@ module Solargraph index -=1 in_whitespace = false while index >= 0 + break if index > 0 and comment_at?(index - 1) unless !in_whitespace and string_at?(index) break if brackets > 0 or parens > 0 or squares > 0 char = @code[index, 1] diff --git a/spec/code_map_spec.rb b/spec/code_map_spec.rb index <HASH>..<HASH> 100755 --- a/spec/code_map_spec.rb +++ b/spec/code_map_spec.rb @@ -883,4 +883,16 @@ describe Solargraph::CodeMap do sugg = code_map.suggest_at(code_map.get_offset(1, 10)).map(&:to_s) expect(sugg).to include('String') end + + it "stops reading signatures at comments" do + code_map = Solargraph::CodeMap.new(code: %( + # String + . + ), filename: 'file.rb') + offset = code_map.get_offset(2, 7) + signature = code_map.get_signature_at(offset) + expect(signature).not_to start_with('String') + sugg = code_map.suggest_at(offset) + expect(sugg).to be_empty + end end
castwide/solargraph#<I>
castwide_solargraph
train
1c725a64b00a489224ba33bf88749be47a3f3606
diff --git a/chef/lib/chef/provider/package/rpm.rb b/chef/lib/chef/provider/package/rpm.rb index <HASH>..<HASH> 100644 --- a/chef/lib/chef/provider/package/rpm.rb +++ b/chef/lib/chef/provider/package/rpm.rb @@ -71,11 +71,11 @@ class Chef def install_package(name, version) unless @current_resource.version run_command_with_systems_locale( - :command => "rpm -i #{@new_resource.source}" + :command => "rpm #{@new_resource.options} -i #{@new_resource.source}" ) else run_command_with_systems_locale( - :command => "rpm -U #{@new_resource.source}" + :command => "rpm #{@new_resource.options} -U #{@new_resource.source}" ) end end @@ -85,11 +85,11 @@ class Chef def remove_package(name, version) if version run_command_with_systems_locale( - :command => "rpm -e #{name}-#{version}" + :command => "rpm #{@new_resource.options} -e #{name}-#{version}" ) else run_command_with_systems_locale( - :command => "rpm -e #{name}" + :command => "rpm #{@new_resource.options} -e #{name}" ) end end diff --git a/chef/spec/unit/provider/package/rpm_spec.rb b/chef/spec/unit/provider/package/rpm_spec.rb index <HASH>..<HASH> 100644 --- a/chef/spec/unit/provider/package/rpm_spec.rb +++ b/chef/spec/unit/provider/package/rpm_spec.rb @@ -73,7 +73,7 @@ describe Chef::Provider::Package::Rpm, "load_current_resource" do it "should raise an exception if the source is not set but we are installing" do new_resource = Chef::Resource::Package.new("emacs") provider = Chef::Provider::Package::Rpm.new(new_resource, @run_context) - lambda { provider.load_current_resource }.should raise_error(Chef::Exceptions::Package) + lambda { provider.load_current_resource }.should raise_error(Chef::Exceptions::Package) end it "should raise an exception if rpm fails to run" do @@ -91,7 +91,7 @@ describe Chef::Provider::Package::Rpm, "load_current_resource" do describe Chef::Provider::Package::Rpm, "install and upgrade" do it "should run rpm -i with the package source to install" do 
@provider.should_receive(:run_command_with_systems_locale).with({ - :command => "rpm -i /tmp/emacs-21.4-20.el5.i386.rpm" + :command => "rpm -i /tmp/emacs-21.4-20.el5.i386.rpm" }) @provider.install_package("emacs", "21.4-20.el5") end @@ -99,7 +99,7 @@ describe Chef::Provider::Package::Rpm, "load_current_resource" do it "should run rpm -U with the package source to upgrade" do @current_resource.stub!(:version).and_return("21.4-19.el5") @provider.should_receive(:run_command_with_systems_locale).with({ - :command => "rpm -U /tmp/emacs-21.4-20.el5.i386.rpm" + :command => "rpm -U /tmp/emacs-21.4-20.el5.i386.rpm" }) @provider.upgrade_package("emacs", "21.4-20.el5") end @@ -108,10 +108,26 @@ describe Chef::Provider::Package::Rpm, "load_current_resource" do describe Chef::Provider::Package::Rpm, "remove" do it "should run rpm -e to remove the package" do @provider.should_receive(:run_command_with_systems_locale).with({ - :command => "rpm -e emacs-21.4-20.el5" + :command => "rpm -e emacs-21.4-20.el5" }) @provider.remove_package("emacs", "21.4-20.el5") end end end + + describe "execute rpm command with options" do + before do + @provider.current_resource = @current_resource + end + + it "should have options set" do + @provider.candidate_version = '11' + @new_resource.stub!(:options).and_return("--dbpath /var/lib/rpm") + @provider.should_receive(:run_command_with_systems_locale).with({ + :command => "rpm --dbpath /var/lib/rpm -i /tmp/emacs-21.4-20.el5.i386.rpm" + }) + @provider.install_package(@new_resource.name, @provider.candidate_version) + end + end end +
CHEF-<I> options directive not working for RPM or Yum providers
chef_chef
train
d10323188b0df7de9383bd73245f5ef1b3effd19
diff --git a/modules/activiti-engine/src/test/java/org/activiti/engine/test/bpmn/gateway/InclusiveGatewayTest.java b/modules/activiti-engine/src/test/java/org/activiti/engine/test/bpmn/gateway/InclusiveGatewayTest.java index <HASH>..<HASH> 100644 --- a/modules/activiti-engine/src/test/java/org/activiti/engine/test/bpmn/gateway/InclusiveGatewayTest.java +++ b/modules/activiti-engine/src/test/java/org/activiti/engine/test/bpmn/gateway/InclusiveGatewayTest.java @@ -439,36 +439,36 @@ public class InclusiveGatewayTest extends PluggableActivitiTestCase { assertNull(processInstance); } - /* This test case is related to ACT-1877 */ - - @Deployment(resources={"org/activiti/engine/test/bpmn/gateway/InclusiveGatewayTest.testWithSignalBoundaryEvent.bpmn20.xml"}) - public void testJoinAfterBoudarySignalEvent() { - - - ProcessInstance processInstanceId = runtimeService.startProcessInstanceByKey("InclusiveGatewayAfterSignalBoundaryEvent"); - - /// Gets the execution waiting for a message notification*/ - String subcriptedExecutionId = runtimeService.createExecutionQuery().processInstanceId(processInstanceId.getId()).messageEventSubscriptionName("MyMessage").singleResult().getId(); - - /*Notify message received: this makes one execution to go on*/ - runtimeService.messageEventReceived("MyMessage", subcriptedExecutionId); - - /*The other execution goes on*/ - Task userTask = taskService.createTaskQuery().processInstanceId(processInstanceId.getId()).singleResult(); - assertEquals("There's still an active execution waiting in the first task", - "usertask1",userTask.getTaskDefinitionKey()); - - taskService.complete( userTask.getId()); - - /*The two executions become one because of Inclusive Gateway*/ - /*The process ends*/ - userTask = taskService.createTaskQuery().processInstanceId(processInstanceId.getId()).singleResult(); - assertEquals("Only when both executions reach the inclusive gateway, flow arrives to the last user task", - "usertask2",userTask.getTaskDefinitionKey()); - 
taskService.complete(userTask.getId()); - - long nExecutions = runtimeService.createExecutionQuery().processInstanceId(processInstanceId.getId()).count(); - assertEquals(0, nExecutions); - - } +// /* This test case is related to ACT-1877 */ +// +// @Deployment(resources={"org/activiti/engine/test/bpmn/gateway/InclusiveGatewayTest.testWithSignalBoundaryEvent.bpmn20.xml"}) +// public void testJoinAfterBoudarySignalEvent() { +// +// +// ProcessInstance processInstanceId = runtimeService.startProcessInstanceByKey("InclusiveGatewayAfterSignalBoundaryEvent"); +// +// /// Gets the execution waiting for a message notification*/ +// String subcriptedExecutionId = runtimeService.createExecutionQuery().processInstanceId(processInstanceId.getId()).messageEventSubscriptionName("MyMessage").singleResult().getId(); +// +// /*Notify message received: this makes one execution to go on*/ +// runtimeService.messageEventReceived("MyMessage", subcriptedExecutionId); +// +// /*The other execution goes on*/ +// Task userTask = taskService.createTaskQuery().processInstanceId(processInstanceId.getId()).singleResult(); +// assertEquals("There's still an active execution waiting in the first task", +// "usertask1",userTask.getTaskDefinitionKey()); +// +// taskService.complete( userTask.getId()); +// +// /*The two executions become one because of Inclusive Gateway*/ +// /*The process ends*/ +// userTask = taskService.createTaskQuery().processInstanceId(processInstanceId.getId()).singleResult(); +// assertEquals("Only when both executions reach the inclusive gateway, flow arrives to the last user task", +// "usertask2",userTask.getTaskDefinitionKey()); +// taskService.complete(userTask.getId()); +// +// long nExecutions = runtimeService.createExecutionQuery().processInstanceId(processInstanceId.getId()).count(); +// assertEquals(0, nExecutions); +// +// } }
Excluding testcase untill pull-request is updated with test-bpmn
Activiti_Activiti
train
a02e6cd4cc5cfd3480f2826f7753e652b3660754
diff --git a/lib/saddle/options.rb b/lib/saddle/options.rb index <HASH>..<HASH> 100644 --- a/lib/saddle/options.rb +++ b/lib/saddle/options.rb @@ -16,7 +16,7 @@ module Saddle::Options :request_style => request_style, :num_retries => num_retries, :timeout => timeout, - :additional_middlewares => additional_middlewares, + :additional_middlewares => @@additional_middlewares, :stubs => stubs, } end @@ -52,17 +52,18 @@ module Saddle::Options 30 end - # Override this to add additional middleware to the request stack + # Use this to add additional middleware to the request stack # ex: - # - # require 'my_middleware' - # def self.default_middleware - # [MyMiddleware] + # add_middleware({ + # :klass => MyMiddleware, + # :args => [arg1, arg2], + # }) # end # ### - def additional_middlewares - [] + @@additional_middlewares = [] + def add_middleware m + @@additional_middlewares << m end # If the Typhoeus adapter is being used, pass stubs to it for testing.
additional middleware can be added one at a time
mLewisLogic_saddle
train
062a7522c2fb1dcf8a4efd65a5297dc410421062
diff --git a/lib/questionlib.php b/lib/questionlib.php index <HASH>..<HASH> 100644 --- a/lib/questionlib.php +++ b/lib/questionlib.php @@ -1331,73 +1331,84 @@ function sort_categories_by_tree(&$categories, $id = 0, $level = 1) { } /** - * flattens tree structure created by add_indented_named - * (adding the names) - * @param array cats tree structure of categories - * @param int depth tree depth tracker (for indenting) - * @return array flattened, formatted list + * Private method, only for the use of add_indented_names(). + * + * Recursively adds an indentedname field to each category, starting with the category + * with id $id, and dealing with that category and all its children, and + * return a new array, with those categories in the right order. + * + * @param array $categories an array of categories which has had childids + * fields added by flatten_category_tree(). Passed by reference for + * performance only. It is not modfied. + * @param int $id the category to start the indenting process from. + * @param int $depth the indent depth. Used in recursive calls. + * @return array a new array of categories, in the right order for the tree. */ -function flatten_category_tree( $cats, $depth=0 ) { - $newcats = array(); - $fillstr = '&nbsp;&nbsp;&nbsp;'; - - foreach ($cats as $key => $cat) { - $newcats[$key] = $cat; - $newcats[$key]->indentedname = str_repeat($fillstr,$depth) . $cat->name; - // recurse if the category has children - if (!empty($cat->children)) { - $newcats += flatten_category_tree( $cat->children, $depth+1 ); - } +function flatten_category_tree(&$categories, $id, $depth = 0) { + + // Indent the name of this category. + $newcategories = array(); + $newcategories[$id] = $categories[$id]; + $newcategories[$id]->indentedname = str_repeat('&nbsp;&nbsp;&nbsp;', $depth) . $categories[$id]->name; + + // Recursively indent the children. 
+ foreach ($categories[$id]->childids as $childid) { + $newcategories = $newcategories + flatten_category_tree($categories, $childid, $depth + 1); } - - return $newcats; + + // Remove the childids array that were temporarily added. + unset($newcategories[$id]->childids); + + return $newcategories; } /** - * format categories into indented list - * @param array categories categories array (from db) - * @return array formatted list of categories + * Format categories into an indented list reflecting the tree structure. + * + * @param array $categories An array of category objects, for example from the. + * @return array The formatted list of categories. */ -function add_indented_names( $categories ) { +function add_indented_names($categories) { - // iterate through categories adding new fields - // and creating references - foreach ($categories as $key => $category) { - $categories[$key]->children = array(); - $categories[$key]->link = &$categories[$key]; + // Add an array to each category to hold the child category ids. This array will be removed + // again by flatten_category_tree(). It should not be used outside these two functions. + foreach (array_keys($categories) as $id) { + $categories[$id]->childids = array(); } - // create tree structure of children - // link field is used to track 'new' place of category in tree - foreach ($categories as $key => $category) { - if (!empty($category->parent)) { - $categories[$category->parent]->link->children[$key] = $categories[$key]; - $categories[$key]->link = &$categories[$category->parent]->link->children[$key]; + // Build the tree structure, and record which categories are top-level. + // We have to be careful, because the categories array may include published + // categories from other courses, but not their parents. 
+ $toplevelcategoryids = array(); + foreach (array_keys($categories) as $id) { + if (!empty($categories[$id]->parent) && array_key_exists($categories[$id]->parent, $categories)) { + $categories[$categories[$id]->parent]->childids[] = $id; + } else { + $toplevelcategoryids[] = $id; } } - // remove top level categories with parents - $newcats = array(); - foreach ($categories as $key => $category) { - unset( $category->link ); - if (empty($category->parent)) { - $newcats[$key] = $category; - } + // Flatten the tree to and add the indents. + $newcategories = array(); + foreach ($toplevelcategoryids as $id) { + $newcategories = $newcategories + flatten_category_tree($categories, $id); } - // walk the tree to flatten revised structure - $categories = flatten_category_tree( $newcats ); - - return $categories; + return $newcategories; } /** -* Displays a select menu of categories with appended course names -* -* Optionaly non editable categories may be excluded. -* @author Howard Miller June '04 -*/ -function question_category_select_menu($courseid,$published=false,$only_editable=false,$selected="") { + * Output a select menu of question categories. + * + * Categories from this course and (optionally) published categories from other courses + * are included. Optionally, only categories the current user may edit can be included. + * + * @param integer $courseid the id of the course to get the categories for. + * @param integer $published if true, include publised categories from other courses. + * @param integer $only_editable if true, exclude categories this user is not allowed to edit. + * @param integer $selected optionally, the id of a category to be selected by default in the dropdown. + */ +function question_category_select_menu($courseid, $published = false, $only_editable = false, $selected = "") { // get sql fragment for published $publishsql="";
Bug <I> - Rewrite the code for making an indented list of question categories, becase the existing code was so horrible. Merged from MOODLE_<I>_STABLE.
moodle_moodle
train
a0ff82c275b85f2b5d6a63693b9124771bd122af
diff --git a/src/Illuminate/Support/Str.php b/src/Illuminate/Support/Str.php index <HASH>..<HASH> 100755 --- a/src/Illuminate/Support/Str.php +++ b/src/Illuminate/Support/Str.php @@ -134,10 +134,13 @@ class Str { */ public static function words($value, $words = 100, $end = '...') { - if (trim($value) == '') return ''; - preg_match('/^\s*+(?:\S++\s*+){1,'.$words.'}/u', $value, $matches); + if (!isset($matches[0])) + { + return ''; + } + if (strlen($value) == strlen($matches[0])) { $end = ''; diff --git a/tests/Support/SupportStrTest.php b/tests/Support/SupportStrTest.php index <HASH>..<HASH> 100755 --- a/tests/Support/SupportStrTest.php +++ b/tests/Support/SupportStrTest.php @@ -18,6 +18,14 @@ class SupportStrTest extends PHPUnit_Framework_TestCase } + public function testStringWithoutWordsDoesntProduceError() + { + $nbsp = chr(0xC2).chr(0xA0); + $this->assertEquals('', Str::words(' ')); + $this->assertEquals('', Str::words($nbsp)); + } + + public function testStringMacros() { Illuminate\Support\Str::macro(__CLASS__, function() { return 'foo'; });
Str:words produced error if given string contains non-ascii whitespaces but no words.
laravel_framework
train
5c1306b054c0101a19a061b9791ebc8ec361a820
diff --git a/src/org/mockito/internal/creation/MethodInterceptorFilter.java b/src/org/mockito/internal/creation/MethodInterceptorFilter.java index <HASH>..<HASH> 100644 --- a/src/org/mockito/internal/creation/MethodInterceptorFilter.java +++ b/src/org/mockito/internal/creation/MethodInterceptorFilter.java @@ -7,6 +7,7 @@ package org.mockito.internal.creation; import org.mockito.cglib.proxy.MethodInterceptor; import org.mockito.cglib.proxy.MethodProxy; import org.mockito.internal.IMockHandler; +import org.mockito.internal.util.ObjectMethodsGuru; import org.mockito.internal.creation.cglib.CGLIBHacker; import org.mockito.internal.invocation.Invocation; import org.mockito.internal.invocation.realmethod.FilteredCGLIBProxyRealMethod; @@ -18,30 +19,28 @@ import java.lang.reflect.Method; @SuppressWarnings("unchecked") public class MethodInterceptorFilter implements MethodInterceptor, Serializable { - private final Method equalsMethod; private final Method hashCodeMethod; private final IMockHandler mockHandler; - CGLIBHacker cglibHacker; + CGLIBHacker cglibHacker = new CGLIBHacker(); + ObjectMethodsGuru objectMethodsGuru = new ObjectMethodsGuru(); public MethodInterceptorFilter(Class toMock, IMockHandler mockHandler) { try { if (toMock.isInterface()) { toMock = Object.class; } - equalsMethod = toMock.getMethod("equals", Object.class); hashCodeMethod = toMock.getMethod("hashCode", (Class[]) null); } catch (NoSuchMethodException e) { throw new RuntimeException("\nSomething went really wrong. Object method could not be found!" 
+ "\n please report it to the mocking mailing list at http://mockito.org"); } this.mockHandler = mockHandler; - this.cglibHacker = new CGLIBHacker(); } public Object intercept(Object proxy, Method method, Object[] args, MethodProxy methodProxy) throws Throwable { - if (equalsMethod.equals(method)) { + if (objectMethodsGuru.isEqualsMethod(method)) { return proxy == args[0]; } else if (hashCodeMethod.equals(method)) { return hashCodeForMock(proxy); diff --git a/src/org/mockito/internal/util/ObjectMethodsGuru.java b/src/org/mockito/internal/util/ObjectMethodsGuru.java index <HASH>..<HASH> 100644 --- a/src/org/mockito/internal/util/ObjectMethodsGuru.java +++ b/src/org/mockito/internal/util/ObjectMethodsGuru.java @@ -9,5 +9,7 @@ public class ObjectMethodsGuru { && method.getName().equals("toString"); } - + public boolean isEqualsMethod(Method method) { + return method.getName().equals("equals") && method.getParameterTypes().length == 1 && method.getParameterTypes()[0] == Object.class; + } } \ No newline at end of file diff --git a/test/org/mockito/internal/util/ObjectMethodsGuruTest.java b/test/org/mockito/internal/util/ObjectMethodsGuruTest.java index <HASH>..<HASH> 100644 --- a/test/org/mockito/internal/util/ObjectMethodsGuruTest.java +++ b/test/org/mockito/internal/util/ObjectMethodsGuruTest.java @@ -6,6 +6,8 @@ import org.mockitousage.IMethods; import org.junit.Test; public class ObjectMethodsGuruTest extends TestBase { + + ObjectMethodsGuru guru = new ObjectMethodsGuru(); @Test public void shouldKnowToStringMethod() throws Exception { @@ -13,4 +15,11 @@ public class ObjectMethodsGuruTest extends TestBase { assertFalse(isToString(IMethods.class.getMethod("toString", String.class))); assertTrue(isToString(Object.class.getMethod("toString"))); } + + @Test + public void shouldKnowEqualsMethod() throws Exception { + assertFalse(guru.isEqualsMethod(IMethods.class.getMethod("equals", String.class))); + assertFalse(guru.isEqualsMethod(Object.class.getMethod("toString"))); 
+ assertTrue(guru.isEqualsMethod(String.class.getMethod("equals", Object.class))); + } } diff --git a/test/org/mockitousage/IMethods.java b/test/org/mockitousage/IMethods.java index <HASH>..<HASH> 100644 --- a/test/org/mockitousage/IMethods.java +++ b/test/org/mockitousage/IMethods.java @@ -203,4 +203,6 @@ public interface IMethods { void longArg(long longArg); void intArgumentMethod(int i); + + boolean equals(String str); } \ No newline at end of file diff --git a/test/org/mockitousage/MethodsImpl.java b/test/org/mockitousage/MethodsImpl.java index <HASH>..<HASH> 100644 --- a/test/org/mockitousage/MethodsImpl.java +++ b/test/org/mockitousage/MethodsImpl.java @@ -377,4 +377,8 @@ public class MethodsImpl implements IMethods { public void intArgumentMethod(int i) { } + + public boolean equals(String str) { + return false; + } }
In order to make mocks serializable started work on this feature. Refactoring. --HG-- extra : convert_revision : svn%3Aaa2aecf3-ea3e-<I>-9d<I>-<I>e7c<I>/trunk%<I>
mockito_mockito
train
dacb63b5dd190f4d6ed9174508d9b3c7b6b351f9
diff --git a/coolfig/test_config.py b/coolfig/test_config.py index <HASH>..<HASH> 100644 --- a/coolfig/test_config.py +++ b/coolfig/test_config.py @@ -170,6 +170,11 @@ def test_list(): assert str_list('a,b,cd, e f g , h ') == ['a', 'b', 'cd', 'e f g', 'h'] +def test_dottedpath(): + func = types.dottedpath('coolfig.test_config.test_dottedpath') + assert func == test_dottedpath + + @pytest.mark.skipif(url is None, reason='sqlalchemy is not installed') def test_sqlalchemy_url(): val = types.sqlalchemy_url('postgres://user:password@host/database')
Added tests for the dottedpath type
GaretJax_coolfig
train
454628e8e7235acb697e9e08d4ef00f0ae476626
diff --git a/apiserver/facades/client/machinemanager/machinemanager.go b/apiserver/facades/client/machinemanager/machinemanager.go index <HASH>..<HASH> 100644 --- a/apiserver/facades/client/machinemanager/machinemanager.go +++ b/apiserver/facades/client/machinemanager/machinemanager.go @@ -505,13 +505,7 @@ func (mm *MachineManagerAPI) GetUpgradeSeriesMessages(args params.UpgradeSeriesN Results: make([]params.StringsResult, len(args.Params)), } for i, param := range args.Params { - machineTag, err := names.ParseMachineTag(param.Entity.Tag) - if err != nil { - err = errors.Trace(err) - results.Results[i].Error = common.ServerError(err) - continue - } - machine, err := mm.st.Machine(machineTag.Id()) + machine, err := mm.machineFromTag(param.Entity.Tag) if err != nil { err = errors.Trace(err) results.Results[i].Error = common.ServerError(err)
Use utily method to get machine from the tag.
juju_juju
train
65ccdfb9579564136191342eca9bf79d3f9b4d45
diff --git a/greenwich/io.py b/greenwich/io.py index <HASH>..<HASH> 100644 --- a/greenwich/io.py +++ b/greenwich/io.py @@ -102,7 +102,7 @@ class VSIFile(object): self._check_closed() if isinstance(data, bytearray): data = bytes(data) - gdal.VSIFWriteL(data, len(data), 1, self._vsif) + gdal.VSIFWriteL(data, 1, len(data), self._vsif) def writable(self): return True
Fix order of args for vsi write call
bkg_greenwich
train
a8909bbd6ece128a544cc30178a7874131d7c9b0
diff --git a/sentinelsat/scripts/cli.py b/sentinelsat/scripts/cli.py index <HASH>..<HASH> 100644 --- a/sentinelsat/scripts/cli.py +++ b/sentinelsat/scripts/cli.py @@ -1,7 +1,11 @@ import json import logging import os -from json import JSONDecodeError +try: + from json import JSONDecodeError + json_parse_exception = json.decoder.JSONDecodeError +except AttributeError: # Python 2 + json_parse_exception = ValueError import click import geojson as gj @@ -240,7 +244,7 @@ def cli( try: geometry = json.loads(geometry) search_kwargs["area"] = geojson_to_wkt(geometry) - except JSONDecodeError: + except json_parse_exception: raise click.UsageError( "geometry string starts with '{' but is not a valid GeoJSON." )
Fix Travis failed build (hopefully)
sentinelsat_sentinelsat
train
579cceff30c4a1872f57ee226fdf54d35a81753c
diff --git a/svg/svg.go b/svg/svg.go index <HASH>..<HASH> 100644 --- a/svg/svg.go +++ b/svg/svg.go @@ -112,17 +112,17 @@ func (o *Minifier) Minify(m *minify.M, w io.Writer, r io.Reader, _ map[string]st case xml.StartTagToken: tag = t.Hash if tag == svg.Metadata { - skipTag(tb) - break + t.Data = nil } else if tag == svg.Line { o.shortenLine(tb, &t, p) - } else if tag == svg.Rect && !o.shortenRect(tb, &t, p) { - skipTag(tb) - break + } else if tag == svg.Rect { + o.shortenRect(tb, &t, p) } else if tag == svg.Polygon || tag == svg.Polyline { o.shortenPoly(tb, &t, p) } - if _, err := w.Write(t.Data); err != nil { + if t.Data == nil { + skipTag(tb) + } else if _, err := w.Write(t.Data); err != nil { return err } case xml.AttributeToken: @@ -273,6 +273,15 @@ func (o *Minifier) shortenDimension(b []byte) ([]byte, int) { func (o *Minifier) shortenLine(tb *TokenBuffer, t *Token, p *PathData) { x1, y1, x2, y2 := zeroBytes, zeroBytes, zeroBytes, zeroBytes if attrs, replacee := tb.Attributes(svg.X1, svg.Y1, svg.X2, svg.Y2); replacee != nil { + // skip converting to path if any attribute contains dimensions, TODO: convert non-percentage dimensions to px + for _, attr := range attrs { + if attr != nil { + if _, dim := parse.Dimension(attr.AttrVal); dim != 0 { + return + } + } + } + if attrs[0] != nil { x1 = minify.Number(attrs[0].AttrVal, o.Decimals) attrs[0].Text = nil @@ -308,8 +317,17 @@ func (o *Minifier) shortenLine(tb *TokenBuffer, t *Token, p *PathData) { } } -func (o *Minifier) shortenRect(tb *TokenBuffer, t *Token, p *PathData) bool { +func (o *Minifier) shortenRect(tb *TokenBuffer, t *Token, p *PathData) { if attrs, replacee := tb.Attributes(svg.X, svg.Y, svg.Width, svg.Height, svg.Rx, svg.Ry); replacee != nil && attrs[4] == nil && attrs[5] == nil { + // skip converting to path if any attribute contains dimensions, TODO: convert non-percentage dimensions to px + for _, attr := range attrs { + if attr != nil { + if _, dim := parse.Dimension(attr.AttrVal); dim != 0 { + 
return + } + } + } + x, y, w, h := zeroBytes, zeroBytes, zeroBytes, zeroBytes if attrs[0] != nil { x = minify.Number(attrs[0].AttrVal, o.Decimals) @@ -328,7 +346,8 @@ func (o *Minifier) shortenRect(tb *TokenBuffer, t *Token, p *PathData) bool { attrs[3].Text = nil } if len(w) == 0 || w[0] == '0' || len(h) == 0 || h[0] == '0' { - return false + t.Data = nil + return } d := make([]byte, 0, 6+2*len(x)+len(y)+len(w)+len(h)) @@ -349,7 +368,6 @@ func (o *Minifier) shortenRect(tb *TokenBuffer, t *Token, p *PathData) bool { replacee.Text = dBytes replacee.AttrVal = d } - return true } func (o *Minifier) shortenPoly(tb *TokenBuffer, t *Token, p *PathData) { diff --git a/svg/svg_test.go b/svg/svg_test.go index <HASH>..<HASH> 100644 --- a/svg/svg_test.go +++ b/svg/svg_test.go @@ -54,6 +54,7 @@ func TestSVG(t *testing.T) { {`<rect x="5" y="10" rx="2" ry="3"/>`, `<rect x="5" y="10" rx="2" ry="3"/>`}, {`<rect x="5" y="10" height="40"/>`, ``}, {`<rect x="5" y="10" width="30" height="0"/>`, ``}, + {`<rect x="5" y="10" width="30%" height="100%"/>`, `<rect x="5" y="10" width="30%" height="100%"/>`}, {`<polygon points="1,2 3,4"/>`, `<path d="M1 2 3 4z"/>`}, {`<polyline points="1,2 3,4"/>`, `<path d="M1 2 3 4"/>`}, {`<svg contentStyleType="text/json ; charset=iso-8859-1"><style>{a : true}</style></svg>`, `<svg contentStyleType="text/json;charset=iso-8859-1"><style>{a : true}</style></svg>`},
Bugfix: SVG do not convert line/rect to path if coordinates have dimensions, fixes #<I>
tdewolff_minify
train
4a805efba6805c40d7bfe51e628545f27ca3d048
diff --git a/lib/index.js b/lib/index.js index <HASH>..<HASH> 100644 --- a/lib/index.js +++ b/lib/index.js @@ -53,7 +53,7 @@ var Wrap = require('./wrap'); */ // Public {{{1 -exports.app = function(mod, name) { // {{{2 +exports.app = function(mod, name, scope) { // {{{2 /** * Create a module wrap instance for an application * @@ -68,7 +68,7 @@ exports.app = function(mod, name) { // {{{2 var res = new Wrap(mod); res.package = name; - res.scope = name; + res.scope = scope || name; return res; };
Minor: O.app accepts scope now
OpenSmartEnvironment_ose
train
f7b484af68e6153ce5c26749d33f349194cddc6a
diff --git a/spec/features/batch_edit_spec.rb b/spec/features/batch_edit_spec.rb index <HASH>..<HASH> 100644 --- a/spec/features/batch_edit_spec.rb +++ b/spec/features/batch_edit_spec.rb @@ -80,7 +80,7 @@ RSpec.describe 'batch', type: :feature, clean_repo: true, js: true do # This was `expect(page).to have_content 'Changes Saved'`, however in debugging, # the `have_content` check was ignoring the `within` scoping and finding # "Changes Saved" for other field areas - find('.status', text: 'Changes Saved') + find('.status', text: 'Changes Saved', wait: 5) end within "#form_permissions" do @@ -94,7 +94,7 @@ RSpec.describe 'batch', type: :feature, clean_repo: true, js: true do # This was `expect(page).to have_content 'Changes Saved'`, however in debugging, # the `have_content` check was ignoring the `within` scoping and finding # "Changes Saved" for other field areas - find('.status', text: 'Changes Saved') + find('.status', text: 'Changes Saved', wait: 5) end # Visit work permissions and verify diff --git a/spec/support/features/batch_edit_actions.rb b/spec/support/features/batch_edit_actions.rb index <HASH>..<HASH> 100644 --- a/spec/support/features/batch_edit_actions.rb +++ b/spec/support/features/batch_edit_actions.rb @@ -17,7 +17,7 @@ def fill_in_batch_edit_fields_and_verify! # This was `expect(page).to have_content 'Changes Saved'`, however in debugging, # the `have_content` check was ignoring the `within` scoping and finding # "Changes Saved" for other field areas - find('.status', text: 'Changes Saved') + find('.status', text: 'Changes Saved', wait: 5) end end end
Wait longer for batch edit to save in feature test
samvera_hyrax
train
05929cde569efcfa4e5cda89e6544839a3d97399
diff --git a/PPI/Module/Controller.php b/PPI/Module/Controller.php index <HASH>..<HASH> 100644 --- a/PPI/Module/Controller.php +++ b/PPI/Module/Controller.php @@ -330,6 +330,15 @@ class Controller { } /** + * Get the app's global configuration + * + * @return mixed + */ + protected function getConfig() { + return $this->getService('Config'); + } + + /** * Inject services into our controller using setters matching against service names * * @return void
Adding in getConfig() alias method
ppi_framework
train
3adefd07d841b9280af1737aba9ced20a9488e95
diff --git a/src/com/opera/core/systems/runner/launcher/OperaLauncherRunnerSettings.java b/src/com/opera/core/systems/runner/launcher/OperaLauncherRunnerSettings.java index <HASH>..<HASH> 100644 --- a/src/com/opera/core/systems/runner/launcher/OperaLauncherRunnerSettings.java +++ b/src/com/opera/core/systems/runner/launcher/OperaLauncherRunnerSettings.java @@ -177,10 +177,7 @@ public class OperaLauncherRunnerSettings extends OperaRunnerSettings { ByteStreams.copy(is, os); - if (!targetLauncher.setLastModified(targetLauncher.lastModified())) { - throw new OperaRunnerException( - "Unable to set modification time for file: " + targetLauncher); - } + targetLauncher.setLastModified(targetLauncher.lastModified()); } catch (IOException e) { throw new WebDriverException("Cannot write file to disk: " + e.getMessage()); } finally {
setLastModified() check fails on Windows, resolves issue #<I>
operasoftware_operaprestodriver
train
1490a20db8513eb1375aa89d0ff59946e9270b72
diff --git a/README.md b/README.md index <HASH>..<HASH> 100644 --- a/README.md +++ b/README.md @@ -32,11 +32,7 @@ Open Sound Control (OSC) library for Golang. Implemented in pure Go. import "github.com/hypebeast/go-osc/osc" func main() { - remoteAddr, err := net.ResolveUDPAddr("udp", "localhost:8765") - if err != nil { - // handle err - } - client := osc.NewOscClient(nil, remoteAddr) + client := osc.NewOscClient("localhost", 8765) msg := osc.NewOscMessage("/osc/address") msg.Append(int32(111)) msg.Append(true) diff --git a/osc/doc.go b/osc/doc.go index <HASH>..<HASH> 100644 --- a/osc/doc.go +++ b/osc/doc.go @@ -54,11 +54,7 @@ OSC client example: - remoteAddr, err := net.ResolveUDPAddr("udp", "localhost:8765") - if err != nil { - // handle err - } - client := osc.NewOscClient(nil, remoteAddr) + client := osc.NewOscClient("localhost", 8765) msg := osc.NewOscMessage("/osc/address") msg.Append(int32(111)) msg.Append(true) diff --git a/osc/osc_test.go b/osc/osc_test.go index <HASH>..<HASH> 100644 --- a/osc/osc_test.go +++ b/osc/osc_test.go @@ -284,3 +284,15 @@ func TestServerIsNotRunningAndGetsClosed(t *testing.T) { t.Errorf("Expected error if the the server is not running and it gets closed") } } + +func TestClientSetLocalAddr(t *testing.T) { + client := NewOscClient("localhost", 8967) + err := client.SetLocalAddr("localhost", 41789) + if err != nil { + t.Error(err.Error()) + } + expectedAddr := "127.0.0.1:41789" + if client.laddr.String() != expectedAddr { + t.Errorf("Expected laddr to be %s but was %s", expectedAddr, client.laddr.String()) + } +}
cleaned up documentation and wrote test for SetLocalAddr
hypebeast_go-osc
train
005d3dd6823e38f5f9a72edddc57720ee504b57f
diff --git a/components/chatroom.js b/components/chatroom.js index <HASH>..<HASH> 100644 --- a/components/chatroom.js +++ b/components/chatroom.js @@ -533,7 +533,7 @@ SteamChatRoomClient.prototype.getFriendMessageHistory = function(friendSteamId, userLastViewed = friendSess.time_last_view; } } catch (ex) { - this.user.emit('debug', `Exception reported calling getActiveMessageSessions() inside of getFriendMessageHistory(): ${ex.message}`); + this.user.emit('debug', `Exception reported calling getActiveFriendMessageSessions() inside of getFriendMessageHistory(): ${ex.message}`); } this.user._sendUnified("FriendMessages.GetRecentMessages#1", { @@ -558,7 +558,7 @@ SteamChatRoomClient.prototype.getFriendMessageHistory = function(friendSteamId, "ordinal": msg.ordinal || 0, "message": msg.message, "message_bbcode_parsed": bbcode_format ? parseBbCode(msg.message) : null, - "unread": (msg.timestamp * 1000) > userLastViewed + "unread": msg.accountid != this.user.steamID.accountid && (msg.timestamp * 1000) > userLastViewed })); body.more_available = !!body.more_available;
Don't mark our own messages as unread
DoctorMcKay_node-steam-user
train
2381250f2606f389d0536b8d5fbd1a8428fe8e22
diff --git a/app/search_engines/bento_search/ebsco_host_engine.rb b/app/search_engines/bento_search/ebsco_host_engine.rb index <HASH>..<HASH> 100644 --- a/app/search_engines/bento_search/ebsco_host_engine.rb +++ b/app/search_engines/bento_search/ebsco_host_engine.rb @@ -228,6 +228,8 @@ class BentoSearch::EbscoHostEngine # normalization. if ["Academic Journal", "Journal"].include?(components.first) && ["Article", "Journal Article"].include?(components.last) return "Journal Article" + elsif components.last == "Book: Monograph" + return "Book" # Book: Monograph what?? elsif components.first == "Periodical" && components.length > 1 return components.last elsif components.size == 2 && components.first.include?(components.last)
ebscohost, improved heuristic for reasonable format_str
jrochkind_bento_search
train
a93fd187bc3327dd428a34d35ac0c9827606b400
diff --git a/go/chat/convsource.go b/go/chat/convsource.go index <HASH>..<HASH> 100644 --- a/go/chat/convsource.go +++ b/go/chat/convsource.go @@ -54,7 +54,7 @@ func (s *baseConversationSource) Sign(payload []byte) ([]byte, error) { // DeleteAssets implements github.com/keybase/go/chat/storage/storage.AssetDeleter interface. func (s *baseConversationSource) DeleteAssets(ctx context.Context, uid gregor1.UID, convID chat1.ConversationID, assets []chat1.Asset) { - defer s.Trace(ctx, func() error { return nil }, "DeleteAssets", assets)() + defer s.Trace(ctx, func() error { return nil }, "DeleteAssets: %v", assets)() if len(assets) == 0 { return diff --git a/go/client/cmd_chat_send.go b/go/client/cmd_chat_send.go index <HASH>..<HASH> 100644 --- a/go/client/cmd_chat_send.go +++ b/go/client/cmd_chat_send.go @@ -70,9 +70,9 @@ func newCmdChatSend(cl *libcmdline.CommandLine, g *libkb.GlobalContext) cli.Comm if ekLib.ShouldRun(context.TODO()) { flags = append(flags, cli.DurationFlag{ Name: "exploding-lifetime", - Usage: fmt.Sprintf(`Make this message an exploding message and set the - lifetime for the given duration. The maximum lifetime is %v - (one week) and the minimum lifetime is %v.`, maxEphemeralLifetime, minEphemeralLifetime), + Usage: fmt.Sprintf(`Make this message an exploding message and set the lifetime for the given duration. 
+ The maximum lifetime is %v (one week) and the minimum lifetime is %v.`, + maxEphemeralLifetime, minEphemeralLifetime), }) } return cli.Command{ diff --git a/go/ephemeral/lib.go b/go/ephemeral/lib.go index <HASH>..<HASH> 100644 --- a/go/ephemeral/lib.go +++ b/go/ephemeral/lib.go @@ -78,10 +78,11 @@ func (e *EKLib) checkLoginAndPUK(ctx context.Context) error { func (e *EKLib) ShouldRun(ctx context.Context) bool { g := e.G() - _, ok := adminWhitelist[e.G().Env.GetUID()] + uid := g.Env.GetUID() + _, ok := adminWhitelist[uid] willRun := ok || g.Env.GetFeatureFlags().Admin() || g.Env.GetRunMode() == libkb.DevelRunMode || g.Env.RunningInCI() if !willRun { - e.G().Log.CDebugf(ctx, "EKLib skipping run") + e.G().Log.CDebugf(ctx, "EKLib skipping run uid: %v", uid) return false } @@ -207,8 +208,7 @@ func (e *EKLib) newUserEKNeeded(ctx context.Context, merkleRoot libkb.MerkleRoot defer e.G().CTrace(ctx, "newUserEKNeeded", func() error { return err })() // Let's see what the latest server statement is. - myUID := e.G().Env.GetUID() - statement, _, wrongKID, err := fetchUserEKStatement(ctx, e.G(), myUID) + statement, _, wrongKID, err := fetchUserEKStatement(ctx, e.G(), e.G().Env.GetUID()) if err != nil { return false, err }
Use GetMyUID throughout, beef up logging for ShouldRun (#<I>)
keybase_client
train
55cb5721b461b319e458172f79f7da2acd3acd91
diff --git a/lib/dicom/DataElement.rb b/lib/dicom/DataElement.rb index <HASH>..<HASH> 100644 --- a/lib/dicom/DataElement.rb +++ b/lib/dicom/DataElement.rb @@ -46,7 +46,7 @@ module DICOM if options[:bin] @bin = options[:bin] else - @bin = encode(new_value) + @bin = encode(value) end else # When no value is present, we set the binary as an empty string, unless the binary is specified:
A minor fix where encoding Data Element value gave an error.
dicom_ruby-dicom
train
3b11e7b078facc9bc0b60c5bf606798abc64207e
diff --git a/deployutils/apps/django/mockup/urls.py b/deployutils/apps/django/mockup/urls.py index <HASH>..<HASH> 100644 --- a/deployutils/apps/django/mockup/urls.py +++ b/deployutils/apps/django/mockup/urls.py @@ -42,12 +42,12 @@ urlpatterns = [ re_path(r'^users/(?P<user>%s)/' % USERNAME_PAT, TemplateView.as_view(template_name='users/index.html'), name='users_profile'), - re_path(r'^register/', + re_path(r'^register/$', SignupView.as_view(), name='registration_register'), - re_path(r'^logout/', + re_path(r'^logout/$', TemplateView.as_view(template_name='accounts/logout.html'), name='logout'), - re_path(r'^login/recover/', + re_path(r'^recover/$', TemplateView.as_view(), name='password_reset'), - re_path(r'^login/', SigninView.as_view(), name='login'), + re_path(r'^login/$', SigninView.as_view(), name='login'), ]
only responds on full url path match Bots might call '/register/.' which would be forwarded to the application in case the default rule is to forward http requests to the application. We don't the mockups to respond in that case.
djaodjin_djaodjin-deployutils
train
4cdedded19e80afdf0f16c28d4f2d8f470f00fc5
diff --git a/grip/server.py b/grip/server.py index <HASH>..<HASH> 100644 --- a/grip/server.py +++ b/grip/server.py @@ -1,5 +1,6 @@ import os import re +import errno import requests from flask import Flask, safe_join, abort from .renderer import render_page @@ -11,11 +12,13 @@ default_filenames = ['README.md', 'README.markdown'] def serve(path=None, host=None, port=None, gfm=False, context=None): """Starts a server to render the specified file or directory containing a README.""" if not path or os.path.isdir(path): - index_file, path = _find_index_file(path) + path = _find_file(path) if not os.path.exists(path): raise ValueError('File not found: ' + path) + directory = os.path.dirname(path) + # Flask application app = Flask('grip') app.config.from_pyfile('default_config.py') @@ -39,16 +42,21 @@ def serve(path=None, host=None, port=None, gfm=False, context=None): # Views @app.route('/') - def index(): - return render_page(_read_file(index_file), os.path.split(index_file)[1], gfm, context, app.config['STYLE_URLS']) - @app.route('/<path:filename>') - def other_files(filename): - try: - full_file = safe_join(path, filename) - return render_page(_read_file(full_file), os.path.split(filename)[1], gfm, context, app.config['STYLE_URLS']) - except: - abort(404) + def render(filename=None): + if filename is not None: + filename = safe_join(directory, filename) + if os.path.isdir(filename): + filename = _find_file(filename) + try: + text = _read_file(filename) + except IOError as ex: + if ex.errno != errno.ENOENT: + raise + return abort(404) + else: + text = _read_file(filename) + return render_page(text, filename, gfm, context, app.config['STYLE_URLS']) # Run local server app.run(app.config['HOST'], app.config['PORT'], debug=app.debug, use_reloader=app.config['DEBUG_GRIP']) @@ -66,14 +74,14 @@ def _get_styles(source_url, pattern): return [] -def _find_index_file(path): - """Finds the index file. 
Returns the index file and root path.""" +def _find_file(path): + """Gets the full path and extension of the specified.""" if path is None: path = '.' for filename in default_filenames: - index_file = os.path.join(path, filename) - if os.path.exists(index_file): - return index_file, path + full_path = os.path.join(path, filename) + if os.path.exists(full_path): + return full_path raise ValueError('No README found at ' + path)
Fix linked files rendering: * Fix bug where providing a filename on the commandline fails * Resolve subdirectories with the default files, e.g. README.md
joeyespo_grip
train
7acc55f93ae4919880790a773be710be81f45600
diff --git a/resolver.go b/resolver.go index <HASH>..<HASH> 100644 --- a/resolver.go +++ b/resolver.go @@ -2,6 +2,7 @@ package kuberesolver import ( "fmt" + "io/ioutil" "net/http" "net/url" "time" @@ -66,7 +67,8 @@ func (r *kubeResolver) watch(target string, stopCh <-chan struct{}, resultCh cha } if resp.StatusCode != http.StatusOK { defer resp.Body.Close() - return fmt.Errorf("invalid response code %d", resp.StatusCode) + rbody, _ := ioutil.ReadAll(resp.Body) + return fmt.Errorf("invalid response code %d: %s", resp.StatusCode, rbody) } sw := newStreamWatcher(resp.Body) for {
Print the body of any error response that comes back from Kubernetes
sercand_kuberesolver
train
e42e342890688bf5fd03e1752d0746d54025ceab
diff --git a/preview/assets/js/toolconfigs/BubbleChart-gapminder.js b/preview/assets/js/toolconfigs/BubbleChart-gapminder.js index <HASH>..<HASH> 100644 --- a/preview/assets/js/toolconfigs/BubbleChart-gapminder.js +++ b/preview/assets/js/toolconfigs/BubbleChart-gapminder.js @@ -62,6 +62,10 @@ var VIZABI_MODEL = { "use": "property", "which": "name" }, + "rank": { + "use": "property", + "which": "rank" + }, "geoshape": { "use": "property", "which": "shape_lores_svg" diff --git a/src/base/model.js b/src/base/model.js index <HASH>..<HASH> 100644 --- a/src/base/model.js +++ b/src/base/model.js @@ -573,7 +573,7 @@ var Model = EventSource.extend({ var join = this._getAllJoins(exceptions, splashScreen); // order by - order_by = (!prop) ? [this._space.time.dim] : []; + order_by = prop ? ["rank"] : [this._space.time.dim]; //return query return {
Order colorlegend lines by rank from data
vizabi_vizabi
train
93ef3050c1904c7b7afd866c3c1df151ffd07971
diff --git a/angr/analyses/ddg.py b/angr/analyses/ddg.py index <HASH>..<HASH> 100644 --- a/angr/analyses/ddg.py +++ b/angr/analyses/ddg.py @@ -1101,4 +1101,114 @@ class DDG(Analysis): if not dst_target_func is src_target_func: self._function_data_dependencies[dst_target_func].add_edge(src, dst, **data) + + def find_definitions(self, variable, simplified_graph=True): + """ + Find all definitions of the given variable. + + :param SimVariable variable: + :param bool simplified_graph: True if you just want to search in the simplified graph instead of the normal + graph. Usually the simplified graph suffices for finding definitions of register + or memory variables. + :return: A collection of all variable definitions to the specific variable. + :rtype: list + """ + + if simplified_graph: + graph = self.simplified_data_graph + else: + graph = self.graph + + defs = [] + + for n in graph.nodes_iter(): # type: ProgramVariable + if n.variable == variable: + defs.append(n) + + return defs + + + def find_consumers(self, var_def, simplified_graph=True): + """ + Find all consumers to the specified variable definition. + + :param ProgramVariable var_def: The variable definition. + :param bool simplified_graph: True if we want to search in the simplified graph, False otherwise. + :return: A collection of all consumers to the specified variable definition. + :rtype: list + """ + + if simplified_graph: + graph = self.simplified_data_graph + else: + graph = self.graph + + if var_def not in graph: + return [] + + consumers = [] + out_edges = graph.out_edges(var_def, data=True) + for _, dst, data in out_edges: + if 'type' in data and data['type'] == 'kill': + # skip killing edges + continue + consumers.append(dst) + + return consumers + + + def find_killers(self, var_def, simplified_graph=True): + """ + Find all killers to the specified variable definition. + + :param ProgramVariable var_def: The variable definition. 
+ :param bool simplified_graph: True if we want to search in the simplified graph, False otherwise. + :return: A collection of all killers to the specified variable definition. + :rtype: list + """ + + if simplified_graph: + graph = self.simplified_data_graph + else: + graph = self.graph + + if var_def not in graph: + return [] + + killers = [] + out_edges = graph.out_edges(var_def, data=True) + for _, dst, data in out_edges: + if 'type' in data and data['type'] == 'kill': + killers.append(dst) + + return killers + + + def find_sources(self, var_def, simplified_graph=True): + """ + Find all sources to the specified variable definition. + + :param ProgramVariable var_def: The variable definition. + :param bool simplified_graph: True if we want to search in the simplified graph, False otherwise. + :return: A collection of all sources to the specified variable definition. + :rtype: list + """ + + if simplified_graph: + graph = self.simplified_data_graph + else: + graph = self.graph + + if var_def not in graph: + return [] + + sources = [] + in_edges = graph.in_edges(var_def, data=True) + for src, _, data in in_edges: + if 'type' in data and data['type'] == 'kill': + continue + sources.append(src) + + return sources + register_analysis(DDG, 'DDG')
DDG: add some handy methods.
angr_angr
train
98b00680d9c98ac1c2111a6fe24d2fdaf63852c1
diff --git a/ui/mirage/factories/job-summary.js b/ui/mirage/factories/job-summary.js index <HASH>..<HASH> 100644 --- a/ui/mirage/factories/job-summary.js +++ b/ui/mirage/factories/job-summary.js @@ -1,4 +1,4 @@ -import { Factory, faker } from 'ember-cli-mirage'; +import { Factory, faker, trait } from 'ember-cli-mirage'; export default Factory.extend({ // Hidden property used to compute the Summary hash @@ -6,17 +6,27 @@ export default Factory.extend({ JobID: '', - Summary: function() { - return this.groupNames.reduce((summary, group) => { - summary[group] = { - Queued: faker.random.number(10), - Complete: faker.random.number(10), - Failed: faker.random.number(10), - Running: faker.random.number(10), - Starting: faker.random.number(10), - Lost: faker.random.number(10), - }; - return summary; - }, {}); - }, + withSummary: trait({ + Summary: function() { + return this.groupNames.reduce((summary, group) => { + summary[group] = { + Queued: faker.random.number(10), + Complete: faker.random.number(10), + Failed: faker.random.number(10), + Running: faker.random.number(10), + Starting: faker.random.number(10), + Lost: faker.random.number(10), + }; + return summary; + }, {}); + }, + }), + + withChildren: trait({ + Children: () => ({ + Pending: faker.random.number(10), + Running: faker.random.number(10), + Dead: faker.random.number(10), + }), + }), }); diff --git a/ui/mirage/factories/job.js b/ui/mirage/factories/job.js index <HASH>..<HASH> 100644 --- a/ui/mirage/factories/job.js +++ b/ui/mirage/factories/job.js @@ -1,4 +1,4 @@ -import { Factory, faker } from 'ember-cli-mirage'; +import { Factory, faker, trait } from 'ember-cli-mirage'; import { provide, provider, pickOne } from '../utils'; import { DATACENTERS } from '../common'; @@ -22,10 +22,48 @@ export default Factory.extend({ faker.list.random(...DATACENTERS) ), - periodic: () => Math.random() > 0.5, - parameterized() { - return !this.periodic; - }, + childrenCount: () => faker.random.number({ min: 1, max: 5 }), + + 
periodic: trait({ + type: 'batch', + periodic: true, + // periodic details object + // serializer update for bool vs details object + periodicDetails: () => ({ + Enabled: true, + ProhibitOverlap: true, + Spec: '*/5 * * * * *', + SpecType: 'cron', + TimeZone: 'UTC', + }), + }), + + parameterized: trait({ + type: 'batch', + parameterized: true, + // parameterized details object + // serializer update for bool vs details object + parameterizedDetails: () => ({ + MetaOptional: null, + MetaRequired: null, + Payload: Math.random() > 0.5 ? 'required' : null, + }), + }), + + periodicChild: trait({ + // Periodic children need a parent job, + // It is the Periodic job's responsibility to create + // periodicChild jobs and provide a parent job. + type: 'batch', + }), + + parameterizedChild: trait({ + // Parameterized children need a parent job, + // It is the Parameterized job's responsibility to create + // parameterizedChild jobs and provide a parent job. + type: 'batch', + payload: window.btoa(faker.lorem.sentence()), + }), createIndex: i => i, modifyIndex: () => faker.random.number({ min: 10, max: 2000 }), @@ -70,7 +108,8 @@ export default Factory.extend({ }); } - const jobSummary = server.create('job-summary', { + const hasChildren = job.periodic || job.parameterized; + const jobSummary = server.create('job-summary', hasChildren ? 'withChildren' : 'withSummary', { groupNames: groups.mapBy('name'), job, }); @@ -102,5 +141,23 @@ export default Factory.extend({ modifyIndex: 4000, }); } + + if (job.periodic) { + // Create periodicChild jobs + server.createList('job', job.childrenCount, 'periodicChild', { + parentId: job.id, + namespaceId: job.namespaceId, + namespace: job.namespace, + }); + } + + if (job.parameterized) { + // Create parameterizedChild jobs + server.createList('job', job.childrenCount, 'parameterizedChild', { + parentId: job.id, + namespaceId: job.namespaceId, + namespace: job.namespace, + }); + } }, });
Update job factory to use traits for specifying job type
hashicorp_nomad
train
e5143356f383e12a7b1c70fabfedb2b0c2dd16f2
diff --git a/lib/searchkick/query.rb b/lib/searchkick/query.rb index <HASH>..<HASH> 100644 --- a/lib/searchkick/query.rb +++ b/lib/searchkick/query.rb @@ -660,11 +660,11 @@ module Searchkick def set_boost_by_indices(payload) return unless options[:indices_boost] - indices_boost = options[:indices_boost].each_with_object({}) do |(key, boost), memo| + indices_boost = options[:indices_boost].map do |key, boost| index = key.respond_to?(:searchkick_index) ? key.searchkick_index.name : key # try to use index explicitly instead of alias: https://github.com/elasticsearch/elasticsearch/issues/4756 index_by_alias = Searchkick.client.indices.get_alias(index: index).keys.first - memo[index_by_alias || index] = boost + {(index_by_alias || index) => boost} end payload[:indices_boost] = indices_boost @@ -812,7 +812,7 @@ module Searchkick # TODO id transformation for arrays def set_order(payload) order = options[:order].is_a?(Enumerable) ? options[:order] : {options[:order] => :asc} - id_field = :_uid + id_field = below60? ? :_uid : :_id payload[:sort] = order.is_a?(Array) ? order : Hash[order.map { |k, v| [k.to_s == "id" ? id_field : k, v] }] end
Fixed deprecation warnings in code
ankane_searchkick
train
5214f19b36e98c8248232a0cd6122f9b7258ba63
diff --git a/prow/github/BUILD.bazel b/prow/github/BUILD.bazel index <HASH>..<HASH> 100644 --- a/prow/github/BUILD.bazel +++ b/prow/github/BUILD.bazel @@ -18,6 +18,7 @@ go_test( embed = [":go_default_library"], deps = [ "//ghproxy/ghcache:go_default_library", + "@com_github_sirupsen_logrus//:go_default_library", "@io_k8s_apimachinery//pkg/util/sets:go_default_library", "@io_k8s_utils//diff:go_default_library", ], diff --git a/prow/github/client.go b/prow/github/client.go index <HASH>..<HASH> 100644 --- a/prow/github/client.go +++ b/prow/github/client.go @@ -645,6 +645,7 @@ func (c *client) requestRetry(method, path, accept string, body interface{}) (*h // retry more than a couple times in this case, because a 404 may // be caused by a bad API call and we'll just burn through API // tokens. + c.logger.WithField("backoff", backoff.String()).Debug("Retrying 404") c.time.Sleep(backoff) backoff *= 2 } else if resp.StatusCode == 403 { @@ -657,6 +658,7 @@ func (c *client) requestRetry(method, path, accept string, body interface{}) (*h // sleep. If it's going to take too long, then break. sleepTime := c.time.Until(time.Unix(int64(t), 0)) + time.Second if sleepTime < c.maxSleepTime { + c.logger.WithField("backoff", sleepTime.String()).Debug("Retrying after token budget reset") c.time.Sleep(sleepTime) } else { err = fmt.Errorf("sleep time for token reset exceeds max sleep time (%v > %v)", sleepTime, c.maxSleepTime) @@ -677,6 +679,7 @@ func (c *client) requestRetry(method, path, accept string, body interface{}) (*h // sleep. If it's going to take too long, then break. 
sleepTime := time.Duration(t+1) * time.Second if sleepTime < c.maxSleepTime { + c.logger.WithField("backoff", sleepTime.String()).Debug("Retrying after abuse ratelimit reset") c.time.Sleep(sleepTime) } else { err = fmt.Errorf("sleep time for abuse rate limit exceeds max sleep time (%v > %v)", sleepTime, c.maxSleepTime) @@ -702,12 +705,19 @@ func (c *client) requestRetry(method, path, accept string, body interface{}) (*h break } else { // Retry 500 after a break. + c.logger.WithField("backoff", backoff.String()).Debug("Retrying 5XX") c.time.Sleep(backoff) backoff *= 2 } } else { // Connection problem. Try a different host. + oldHostIndex := hostIndex hostIndex = (hostIndex + 1) % len(c.bases) + c.logger.WithFields(logrus.Fields{ + "backoff": backoff.String(), + "old-endpoint": c.bases[oldHostIndex], + "new-endpoint": c.bases[hostIndex], + }).Debug("Retrying request due to connection problem") c.time.Sleep(backoff) backoff *= 2 } diff --git a/prow/github/client_test.go b/prow/github/client_test.go index <HASH>..<HASH> 100644 --- a/prow/github/client_test.go +++ b/prow/github/client_test.go @@ -33,6 +33,7 @@ import ( "testing" "time" + "github.com/sirupsen/logrus" "k8s.io/apimachinery/pkg/util/sets" "k8s.io/utils/diff" @@ -56,7 +57,10 @@ func getClient(url string) *client { return []byte("") } + logger := logrus.New() + logger.SetLevel(logrus.DebugLevel) return &client{ + logger: logrus.NewEntry(logger), delegate: &delegate{ time: &testTime{}, getToken: getToken,
GitHub client: Log retries
kubernetes_test-infra
train
97b97b63302213f1b3180d1423fb01f9493a5a1b
diff --git a/mod/chat/lib.php b/mod/chat/lib.php index <HASH>..<HASH> 100644 --- a/mod/chat/lib.php +++ b/mod/chat/lib.php @@ -327,7 +327,7 @@ function chat_print_recent_activity($course, $viewfullnames, $timestart) { $strftimerecent = get_string('strftimerecent'); if ($past) { - echo $OUTPUT->heading(get_string("pastchats", 'chat').':'); + echo $OUTPUT->heading(get_string("pastchats", 'chat').':', 3); foreach ($past as $cm) { $link = $CFG->wwwroot.'/mod/chat/view.php?id='.$cm->id; @@ -338,7 +338,7 @@ function chat_print_recent_activity($course, $viewfullnames, $timestart) { } if ($current) { - echo $OUTPUT->heading(get_string("currentchats", 'chat').':'); + echo $OUTPUT->heading(get_string("currentchats", 'chat').':', 3); $oldest = floor((time()-$CFG->chat_old_ping)/10)*10; // better db caching
MDL-<I> mod_chat: Use H3 instead of H2 in recent activity info
moodle_moodle
train
bea2e74dc677a3e73dc0c9b2172ed9514479408d
diff --git a/webgl.go b/webgl.go index <HASH>..<HASH> 100644 --- a/webgl.go +++ b/webgl.go @@ -467,7 +467,7 @@ func (c *Context) Clear(flags int) { } // Specifies color values to use by the clear method to clear the color buffer. -func (c *Context) ClearColor(r, g, b, a float64) { +func (c *Context) ClearColor(r, g, b, a float32) { c.Call("clearColor", r, g, b, a) }
ClearColor now takes float<I>
gopherjs_webgl
train
8075d5209b10309291485dcb64ebf66d01f5f135
diff --git a/clustering/src/main/java/org/jboss/as/clustering/infinispan/DefaultEmbeddedCacheManager.java b/clustering/src/main/java/org/jboss/as/clustering/infinispan/DefaultEmbeddedCacheManager.java index <HASH>..<HASH> 100644 --- a/clustering/src/main/java/org/jboss/as/clustering/infinispan/DefaultEmbeddedCacheManager.java +++ b/clustering/src/main/java/org/jboss/as/clustering/infinispan/DefaultEmbeddedCacheManager.java @@ -22,6 +22,7 @@ package org.jboss.as.clustering.infinispan; +import java.security.AccessController; import java.util.HashSet; import java.util.List; import java.util.Set; @@ -36,11 +37,15 @@ import org.infinispan.manager.CacheContainer; import org.infinispan.manager.EmbeddedCacheManager; import org.infinispan.remoting.transport.Address; import org.infinispan.stats.Stats; +import org.jboss.util.loading.ContextClassLoaderSwitcher; +import org.jboss.util.loading.ContextClassLoaderSwitcher.SwitchContext; /** * @author Paul Ferraro */ public class DefaultEmbeddedCacheManager implements EmbeddedCacheManager { + @SuppressWarnings("unchecked") + private static final ContextClassLoaderSwitcher switcher = (ContextClassLoaderSwitcher) AccessController.doPrivileged(ContextClassLoaderSwitcher.INSTANTIATOR); private final String defaultCache; private final EmbeddedCacheManager container; @@ -74,7 +79,14 @@ public class DefaultEmbeddedCacheManager implements EmbeddedCacheManager { */ @Override public <K, V> Cache<K, V> getCache(String cacheName, boolean start) { - return new DelegatingCache<K, V>(this.container.<K, V>getCache(this.getCacheName(cacheName))); + SwitchContext context = start ? switcher.getSwitchContext(DefaultEmbeddedCacheManager.class.getClassLoader()) : null; + try { + return new DelegatingCache<K, V>(this.container.<K, V>getCache(this.getCacheName(cacheName))); + } finally { + if (context != null) { + context.reset(); + } + } } /**
Restore classloader switching on cache creation. Fixes CNFE when using custom executors.
wildfly_wildfly
train
00feeedbe69da9737b6d9ef11028d1fd6c1d36be
diff --git a/ask-sdk-core/ask_sdk_core/response_helper.py b/ask-sdk-core/ask_sdk_core/response_helper.py index <HASH>..<HASH> 100644 --- a/ask-sdk-core/ask_sdk_core/response_helper.py +++ b/ask-sdk-core/ask_sdk_core/response_helper.py @@ -27,6 +27,7 @@ if typing.TYPE_CHECKING: from ask_sdk_model import Directive from ask_sdk_model.ui import Card from ask_sdk_model.canfulfill import CanFulfillIntent + from ask_sdk_model.ui.play_behavior import PlayBehavior PLAIN_TEXT_TYPE = "PlainText" @@ -51,23 +52,27 @@ class ResponseFactory(object): directives=None, should_end_session=None, can_fulfill_intent=None) - def speak(self, speech): - # type: (str) -> 'ResponseFactory' + def speak(self, speech, play_behavior=None): + # type: (str, PlayBehavior) -> 'ResponseFactory' """Say the provided speech to the user. :param speech: the output speech sent back to the user. :type speech: str + :param play_behavior: attribute to control alexa's speech + interruption + :type play_behavior: ask_sdk_model.ui.play_behavior.PlayBehavior :return: response factory with partial response being built and access from self.response. :rtype: ResponseFactory """ ssml = "<speak>{}</speak>".format(self.__trim_outputspeech( speech_output=speech)) - self.response.output_speech = SsmlOutputSpeech(ssml=ssml) + self.response.output_speech = SsmlOutputSpeech( + ssml=ssml, play_behavior=play_behavior) return self - def ask(self, reprompt): - # type: (str) -> 'ResponseFactory' + def ask(self, reprompt, play_behavior=None): + # type: (str, PlayBehavior) -> 'ResponseFactory' """Provide reprompt speech to the user, if no response for 8 seconds. @@ -76,13 +81,17 @@ class ResponseFactory(object): :param reprompt: the output speech to reprompt. :type reprompt: str + :param play_behavior: attribute to control alexa's speech + interruption + :type play_behavior: ask_sdk_model.ui.play_behavior.PlayBehavior :return: response factory with partial response being built and access from self.response. 
:rtype: ResponseFactory """ ssml = "<speak>{}</speak>".format(self.__trim_outputspeech( speech_output=reprompt)) - output_speech = SsmlOutputSpeech(ssml=ssml) + output_speech = SsmlOutputSpeech( + ssml=ssml, play_behavior=play_behavior) self.response.reprompt = Reprompt(output_speech=output_speech) if not self.__is_video_app_launch_directive_present(): self.response.should_end_session = False diff --git a/ask-sdk-core/tests/unit/test_response_helper.py b/ask-sdk-core/tests/unit/test_response_helper.py index <HASH>..<HASH> 100644 --- a/ask-sdk-core/tests/unit/test_response_helper.py +++ b/ask-sdk-core/tests/unit/test_response_helper.py @@ -26,6 +26,7 @@ from ask_sdk_model.interfaces.display import RichText from ask_sdk_model.canfulfill import ( CanFulfillIntent, CanFulfillIntentValues, CanFulfillSlot, CanFulfillSlotValues, CanUnderstandSlotValues) +from ask_sdk_model.ui.play_behavior import PlayBehavior from ask_sdk_core.response_helper import ( ResponseFactory, get_text_content, get_plain_text_content, @@ -43,6 +44,16 @@ class TestResponseFactory(unittest.TestCase): ssml="<speak></speak>"), ( "The speak method of ResponseFactory fails to set output speech") + def test_speak_with_play_behavior(self): + test_play_behavior = PlayBehavior.ENQUEUE + response_factory = self.response_factory.speak( + speech=None, play_behavior=test_play_behavior) + + assert response_factory.response.output_speech == SsmlOutputSpeech( + ssml="<speak></speak>", play_behavior=test_play_behavior), ( + "The speak method of ResponseFactory fails to set play behavior " + "on output speech") + def test_ask(self): response_factory = self.response_factory.ask(reprompt=None) @@ -53,6 +64,18 @@ class TestResponseFactory(unittest.TestCase): "The ask method of ResponseFactory fails to set the " "should_end_session to False") + def test_ask_with_play_behavior(self): + test_play_behavior = PlayBehavior.REPLACE_ALL + response_factory = self.response_factory.ask( + reprompt=None, 
play_behavior=test_play_behavior) + + assert response_factory.response.reprompt == Reprompt( + output_speech=SsmlOutputSpeech( + ssml="<speak></speak>", + play_behavior=test_play_behavior)), ( + "The ask method of ResponseFactory fails to set play behavior " + "on reprompt output speech") + def test_ask_with_video_app_launch_directive(self): directive = LaunchDirective(video_item=VideoItem( source=None, metadata=Metadata(title=None, subtitle=None)))
Add PlayBehavior optional parameter on response builder speak, ask methods This commit includes the optional parameter play_behavior on ResponseFactory's speak and ask methods, to include the Speech Interruption property PlayBehavior as mentioned in the Alexa Response structure definition.
alexa_alexa-skills-kit-sdk-for-python
train
04d7e7a21339d9101c2e9b4e81eafd6d3753334e
diff --git a/lib/marvel/client.rb b/lib/marvel/client.rb index <HASH>..<HASH> 100644 --- a/lib/marvel/client.rb +++ b/lib/marvel/client.rb @@ -1,7 +1,7 @@ require 'pry' require 'json' require 'faraday' -require 'marvel/configuration' +require_relative 'configuration' module Marvel class Client @@ -13,6 +13,14 @@ module Marvel reset end + binding.pry + + # Requests on the server side must be of the form + # http://gateway.marvel.com/v1/comics/?ts=1&apikey=1234&hash=ffd275c5130566a2916217b101f26150 + # where ts is a timestamp + # where apikey is your public API key + # where hash is the MD5 hash of your private API key + # TODO; MODULARIZE THIS!!! # TODO; Refactor — tons of duplication @@ -26,21 +34,25 @@ module Marvel # fetches a single character by id def get_character(id) # v1/public/characters/{characterId} + Faraday.get("#{BASE_URL}characters/#{id}?api_key=#{api_key}").body end # fetches lists of comics filtered by a character id def get_comics_by_character_id(id) # v1/public/characters/{characterId}/comics + Faraday.get("#{BASE_URL}comics/#{id}?api_key=#{api_key}").body end # fetches lists of events filtered by a character id def get_events_by_character_id(id) # v1/public/characters/{characterId}/events + Faraday.get("#{BASE_URL}events/#{id}?api_key=#{api_key}").body end # fetches lists of stories filtered by a character id def get_stories_by_character_id(id) # v1/public/characters/{characterId}/stories + Faraday.get("#{BASE_URL}stories/#{id}?api_key=#{api_key}").body end # Comics:
Adds require relative and multiple comments on how to get requests working
O-I_marvel
train
27e56df511383ab2b5492466478f26ea19effae7
diff --git a/tests/src/test/java/alluxio/hadoop/fs/AccumulatingReducer.java b/tests/src/test/java/alluxio/hadoop/fs/AccumulatingReducer.java index <HASH>..<HASH> 100644 --- a/tests/src/test/java/alluxio/hadoop/fs/AccumulatingReducer.java +++ b/tests/src/test/java/alluxio/hadoop/fs/AccumulatingReducer.java @@ -60,10 +60,10 @@ public class AccumulatingReducer extends MapReduceBase implements Reducer<Text, } /** - * This method accumulates values based on their type + * This method accumulates values based on their type. * - * @param key the type of values. - * @param values the values to accumulates. + * @param key the type of values + * @param values the values to accumulates * @param output collect the result of accumulating * @param reporter to report progress and update status information * @throws IOException
Add javadoc for AccumulatingReducer#reduce
Alluxio_alluxio
train
62c493fd59028b07d5516c7dbe8769931bdd5442
diff --git a/guice/common/src/main/java/com/peterphi/std/guice/restclient/jaxb/webquery/WQUriControlField.java b/guice/common/src/main/java/com/peterphi/std/guice/restclient/jaxb/webquery/WQUriControlField.java index <HASH>..<HASH> 100644 --- a/guice/common/src/main/java/com/peterphi/std/guice/restclient/jaxb/webquery/WQUriControlField.java +++ b/guice/common/src/main/java/com/peterphi/std/guice/restclient/jaxb/webquery/WQUriControlField.java @@ -14,7 +14,8 @@ public enum WQUriControlField */ OFFSET("_offset"), /** - * Set to the maximum results to return for this query + * Set to the maximum results to return for this query; a special limit of -1 (see {@link WebQuery#LIMIT_RETURN_ZERO}) + * requests no row data (useful when just wanting a count of resultset size. */ LIMIT("_limit"), ORDER("_order"),
Improve javadoc in WQUriControlField
petergeneric_stdlib
train
49a1758f681ec42c86b5d4f5454b7250d3ee3766
diff --git a/README.rdoc b/README.rdoc index <HASH>..<HASH> 100644 --- a/README.rdoc +++ b/README.rdoc @@ -123,6 +123,13 @@ Server certificate verification is enabled by default. If you don't want to chec require 'flickraw' FlickRaw.check_certificate = false +=== CA Certificate File Path + +OpenSSL::X509::DEFAULT_CERT_FILE is used as a CA certificate file. If you want to change the path : + + require 'flickraw' + FlickRaw.ca_file = '/path/to/cacert.pem' + == Flickr URL Helpers There are some helpers to build flickr urls : diff --git a/lib/flickraw/api.rb b/lib/flickraw/api.rb index <HASH>..<HASH> 100644 --- a/lib/flickraw/api.rb +++ b/lib/flickraw/api.rb @@ -43,6 +43,7 @@ module FlickRaw @oauth_consumer = OAuthClient.new(FlickRaw.api_key, FlickRaw.shared_secret) @oauth_consumer.proxy = FlickRaw.proxy @oauth_consumer.check_certificate = FlickRaw.check_certificate + @oauth_consumer.ca_file = FlickRaw.ca_file @oauth_consumer.user_agent = USER_AGENT @access_token = @access_secret = nil @@ -169,6 +170,9 @@ module FlickRaw # Check the server certificate (ssl connection only) attr_accessor :check_certificate + # Set path of a CA certificate file in PEM format (ssl connection only) + attr_accessor :ca_file + BASE58_ALPHABET="123456789abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ".freeze def base58(id) id = id.to_i diff --git a/lib/flickraw/oauth.rb b/lib/flickraw/oauth.rb index <HASH>..<HASH> 100644 --- a/lib/flickraw/oauth.rb +++ b/lib/flickraw/oauth.rb @@ -64,6 +64,7 @@ module FlickRaw attr_accessor :user_agent attr_reader :proxy attr_accessor :check_certificate + attr_accessor :ca_file def proxy=(url); @proxy = URI.parse(url || '') end def initialize(consumer_key, consumer_secret) @@ -146,6 +147,7 @@ module FlickRaw http = Net::HTTP.new(url.host, url.port, @proxy.host, @proxy.port, @proxy.user, @proxy.password) http.use_ssl = (url.scheme == 'https') http.verify_mode = (@check_certificate ? 
OpenSSL::SSL::VERIFY_PEER : OpenSSL::SSL::VERIFY_NONE) + http.ca_file = @ca_file r = http.start {|agent| request = Net::HTTP::Post.new(url.path) request['User-Agent'] = @user_agent if @user_agent
Add FlickRaw.ca_file option Secure endpoint connection fails if CA certificate file is old or is not installed. So, add FlickRaw.ca_file option to set the CA certificate file path even if we doesn't have root access right.
hanklords_flickraw
train
2697c2ae19cb5d073a0d710cb6f3a35927c6392e
diff --git a/src/DI/SecurityExtension.php b/src/DI/SecurityExtension.php index <HASH>..<HASH> 100644 --- a/src/DI/SecurityExtension.php +++ b/src/DI/SecurityExtension.php @@ -60,6 +60,7 @@ class SecurityExtension extends CompilerExtension $builder->getDefinition($this->prefix('storage.' . $firewall)) ->setArguments([ 'identityValidator' => '@' . $name, + 'namespace' => $firewall, ]); } else { throw new AssertionException("Identity validator '$name' of firewall '$firewall' could not be passed to corresponding storage."); diff --git a/src/UserStorage.php b/src/UserStorage.php index <HASH>..<HASH> 100644 --- a/src/UserStorage.php +++ b/src/UserStorage.php @@ -16,9 +16,15 @@ class UserStorage extends BaseUserStorage /** @var IdentityValidatorInterface */ private $identityValidator; - public function __construct(Session $session, IdentityValidatorInterface $identityValidator = NULL) + /** + * @param string $namespace + * @param Session $session + * @param IdentityValidatorInterface $identityValidator + */ + public function __construct($namespace, Session $session, IdentityValidatorInterface $identityValidator = NULL) { parent::__construct($session); + $this->setNamespace($namespace); $this->identityValidator = $identityValidator; }
Added namespace to UserStorage
Arachne_Security
train
72d7dab05fd3eb160bd73ffb505d312ce04fdf2b
diff --git a/lib/fluent/formatter.rb b/lib/fluent/formatter.rb index <HASH>..<HASH> 100644 --- a/lib/fluent/formatter.rb +++ b/lib/fluent/formatter.rb @@ -1,7 +1,23 @@ +# +# Fluent +# +# Copyright (C) 2014 Fluentd project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# module Fluent require 'fluent/registry' - # TextFormatter is module, not class. This is for reducing method call unlike TextParser. module TextFormatter module HandleTagAndTimeMixin def self.included(klass) @@ -66,7 +82,6 @@ module Fluent include Configurable include HandleTagAndTimeMixin - # Other formatter also should have this paramter? config_param :time_as_epoch, :bool, :default => false def configure(conf) @@ -89,7 +104,6 @@ module Fluent end end - # Should use 'ltsv' gem? class LabeledTSVFormatter include Configurable include HandleTagAndTimeMixin @@ -108,8 +122,7 @@ module Fluent end end - # More better name? 
- class OneKeyFormatter + class SingleValueFormatter include Configurable config_param :message_key, :string, :default => 'message' @@ -124,7 +137,7 @@ module Fluent 'out_file' => Proc.new { OutFileFormatter.new }, 'json' => Proc.new { JSONFormatter.new }, 'ltsv' => Proc.new { LabeledTSVFormatter.new }, - 'onekey' => Proc.new { OneKeyFormatter.new }, + 'single_value' => Proc.new { SingleValueFormatter.new }, }.each { |name, factory| TEMPLATE_REGISTRY.register(name, factory) } diff --git a/test/test_formatter.rb b/test/test_formatter.rb index <HASH>..<HASH> 100644 --- a/test/test_formatter.rb +++ b/test/test_formatter.rb @@ -163,11 +163,11 @@ module FormatterTest end end - class OneKeyFormatterTest < ::Test::Unit::TestCase + class SingleValueFormatterTest < ::Test::Unit::TestCase include FormatterTest def test_config_params - formatter = TextFormatter::OneKeyFormatter.new + formatter = TextFormatter::SingleValueFormatter.new assert_equal "message", formatter.message_key formatter.configure('message_key' => 'foobar') @@ -175,13 +175,13 @@ module FormatterTest end def test_format - formatter = TextFormatter::TEMPLATE_REGISTRY.lookup('onekey').call + formatter = TextFormatter::TEMPLATE_REGISTRY.lookup('single_value').call formatted = formatter.format('tag', Engine.now, {'message' => 'awesome'}) assert_equal('awesome', formatted) end def test_format_with_message_key - formatter = TextFormatter::OneKeyFormatter.new + formatter = TextFormatter::SingleValueFormatter.new formatter.configure('message_key' => 'foobar') formatted = formatter.format('tag', Engine.now, {'foobar' => 'foo'})
onekey format changed to single_value in TextFormatter
fluent_fluentd
train
e25731d94f57e66bcd2c2e64c68174542207109d
diff --git a/docs/conf.py b/docs/conf.py index <HASH>..<HASH> 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -25,7 +25,7 @@ about_fields = [ about = {} DIR = os.path.dirname('__file__') -with open(os.path.join(DIR, '../setup.py'), 'r') as f: +with open(os.path.join(DIR, '../setup_trinity.py'), 'r') as f: for line in f: for field in about_fields: if ' ' + field + '=' in line: @@ -194,4 +194,4 @@ doctest_default_flags = (0 | doctest.ELLIPSIS | doctest.IGNORE_EXCEPTION_DETAIL | doctest.NORMALIZE_WHITESPACE -) \ No newline at end of file +)
docs: build with trinity package info
ethereum_py-evm
train
ba20f8bb0d2c1a1e9d35e274c225dac0a95557ae
diff --git a/lib/state_machines/machine_collection.rb b/lib/state_machines/machine_collection.rb index <HASH>..<HASH> 100644 --- a/lib/state_machines/machine_collection.rb +++ b/lib/state_machines/machine_collection.rb @@ -1,8 +1,6 @@ module StateMachines # Represents a collection of state machines for a class class MachineCollection < Hash - - # Initializes the state of each machine in the given object. This can allow # states to be initialized in two groups: static and dynamic. For example: # @@ -13,9 +11,9 @@ module StateMachines # If no block is provided, then all states will still be initialized. # # Valid configuration options: - # * <tt>:static</tt> - Whether to initialize static states. If set to - # :force, the state will be initialized regardless of its current value. - # Default is :force. + # * <tt>:static</tt> - Whether to initialize static states. Unless set to + # false, the state will be initialized regardless of its current value. + # Default is true. # * <tt>:dynamic</tt> - Whether to initialize dynamic states. If set to # :force, the state will be initialized regardless of its current value. # Default is true. 
diff --git a/test/unit/machine/machine_with_static_initial_state_test.rb b/test/unit/machine/machine_with_static_initial_state_test.rb index <HASH>..<HASH> 100644 --- a/test/unit/machine/machine_with_static_initial_state_test.rb +++ b/test/unit/machine/machine_with_static_initial_state_test.rb @@ -29,17 +29,6 @@ class MachineWithStaticInitialStateTest < StateMachinesTest assert_equal 'parked', @klass.new.state end - def test_not_set_initial_state_even_if_not_empty - @klass.class_eval do - def initialize(_attributes = {}) - self.state = 'idling' - super() - end - end - object = @klass.new - assert_equal 'idling', object.state - end - def test_should_not_initial_state_prior_to_initialization base = Class.new do attr_accessor :state_on_init diff --git a/test/unit/machine_collection/machine_collection_state_initialization_test.rb b/test/unit/machine_collection/machine_collection_state_initialization_test.rb index <HASH>..<HASH> 100644 --- a/test/unit/machine_collection/machine_collection_state_initialization_test.rb +++ b/test/unit/machine_collection/machine_collection_state_initialization_test.rb @@ -57,10 +57,10 @@ class MachineCollectionStateInitializationTest < StateMachinesTest assert_equal 'active', @object.alarm_state end - def test_should_not_initialize_existing_static_states_by_default + def test_should_initialize_existing_static_states_by_default @object.state = 'idling' @machines.initialize_states(@object) - assert_equal 'idling', @object.state + assert_equal 'parked', @object.state end def test_should_initialize_existing_static_states_if_forced @@ -69,10 +69,10 @@ class MachineCollectionStateInitializationTest < StateMachinesTest assert_equal 'parked', @object.state end - def test_should_not_initialize_existing_static_states_if_not_forced + def test_should_initialize_existing_static_states_if_not_forced @object.state = 'idling' @machines.initialize_states(@object, static: true) - assert_equal 'idling', @object.state + assert_equal 'parked', @object.state 
end def test_should_skip_dynamic_states_if_disabled
Change initialize_states static option api To accomodate current tests for this build and activerecord build .. otherwise we will probably have to override moar of activerecord internals
state-machines_state_machines
train
3d93e39edbb400a176a4e4b3f83d29e37eb888f5
diff --git a/lib/swaggerUI.js b/lib/swaggerUI.js index <HASH>..<HASH> 100644 --- a/lib/swaggerUI.js +++ b/lib/swaggerUI.js @@ -3,8 +3,9 @@ var P = require('bluebird'); var fs = P.promisifyAll(require('fs')); var path = require('path'); +// swagger-ui helpfully exports the absolute path of its dist directory +var docRoot = require('swagger-ui').dist + '/'; -var docRoot = __dirname + '/../node_modules/swagger-ui/dist/'; function staticServe (restbase, req) { // Expand any relative paths for security var filePath = req.query.path.replace(/\.\.\//g, '');
Use swagger-ui's dist path export This makes the docs work in setups (like prod) where the node_modules checkout is not inside the restbase repository.
wikimedia_restbase
train
650c5e54f6ca66fa969caf4a55bf0107b5e6bc4d
diff --git a/composer.json b/composer.json index <HASH>..<HASH> 100644 --- a/composer.json +++ b/composer.json @@ -10,8 +10,7 @@ }, "require": { "php": ">=5.3.0", - "clue/graph": "~0.9.0|~0.8.0", - "graphp/algorithms": "~0.8.0" + "clue/graph": "~0.9.0|~0.8.0" }, "require-dev": { "phpunit/phpunit": "^6.4 || ^5.7 || ^4.8.35" diff --git a/src/GraphViz.php b/src/GraphViz.php index <HASH>..<HASH> 100644 --- a/src/GraphViz.php +++ b/src/GraphViz.php @@ -2,13 +2,10 @@ namespace Graphp\GraphViz; -use Graphp\Algorithms\Directed; -use Graphp\Algorithms\Groups; -use Graphp\Algorithms\Degree; -use Fhaculty\Graph\Exception\UnexpectedValueException; -use Fhaculty\Graph\Edge\Base as Edge; -use \stdClass; use Fhaculty\Graph\Attribute\AttributeBagNamespaced; +use Fhaculty\Graph\Edge\Base as Edge; +use Fhaculty\Graph\Edge\Directed as EdgeDirected; +use Fhaculty\Graph\Exception\UnexpectedValueException; use Fhaculty\Graph\Graph; use Fhaculty\Graph\Vertex; @@ -225,8 +222,13 @@ class GraphViz */ public function createScript(Graph $graph) { - $alg = new Directed($graph); - $directed = $alg->hasDirected(); + $directed = false; + foreach ($graph->getEdges() as $edge) { + if ($edge instanceof EdgeDirected) { + $directed = true; + break; + } + } /* * The website [http://www.graphviz.org/content/dot-language] uses the term `ID` when displaying @@ -255,22 +257,23 @@ class GraphViz } } - $alg = new Groups($graph); - // only append group number to vertex label if there are at least 2 different groups - $showGroups = ($alg->getNumberOfGroups() > 1); + $groups = array(); + foreach ($graph->getVertices()->getMap() as $vid => $vertex) { + $groups[$vertex->getGroup()][$vid] = $vertex; + } - if ($showGroups) { - $gid = 0; + // only cluster vertices into groups if there are at least 2 different groups + if (count($groups) > 1) { $indent = str_repeat($this->formatIndent, 2); // put each group of vertices in a separate subgraph cluster - foreach ($alg->getGroups() as $group) { - $script .= 
$this->formatIndent . 'subgraph cluster_' . $gid++ . ' {' . self::EOL . + foreach ($groups as $group => $vertices) { + $script .= $this->formatIndent . 'subgraph cluster_' . $group . ' {' . self::EOL . $indent . 'label = ' . $this->escape($group) . self::EOL; - foreach($alg->getVerticesGroup($group)->getMap() as $vid => $vertex) { + foreach ($vertices as $vid => $vertex) { $layout = $this->getLayoutVertex($vertex); $script .= $indent . $this->escapeId($vid); - if($layout){ + if ($layout) { $script .= ' ' . $this->escapeAttributes($layout); } $script .= self::EOL; @@ -278,16 +281,14 @@ class GraphViz $script .= ' }' . self::EOL; } } else { - $alg = new Degree($graph); - // explicitly add all isolated vertices (vertices with no edges) and vertices with special layout set // other vertices wil be added automatically due to below edge definitions foreach ($graph->getVertices()->getMap() as $vid => $vertex){ $layout = $this->getLayoutVertex($vertex); - if($layout || $alg->isVertexIsolated($vertex)){ + if ($layout || $vertex->getEdges()->isEmpty()) { $script .= $this->formatIndent . $this->escapeId($vid); - if($layout){ + if ($layout) { $script .= ' ' . $this->escapeAttributes($layout); } $script .= self::EOL; @@ -337,7 +338,7 @@ class GraphViz public static function escape($id) { // see raw() - if ($id instanceof stdClass && isset($id->string)) { + if ($id instanceof \stdClass && isset($id->string)) { return $id->string; } // see @link: There is no semantic difference between abc_2 and "abc_2" @@ -377,7 +378,7 @@ class GraphViz * create a raw string representation, i.e. 
do NOT escape the given string when used in graphviz output * * @param string $string - * @return StdClass + * @return \stdClass * @see GraphViz::escape() */ public static function raw($string) diff --git a/tests/GraphVizTest.php b/tests/GraphVizTest.php index <HASH>..<HASH> 100644 --- a/tests/GraphVizTest.php +++ b/tests/GraphVizTest.php @@ -71,6 +71,29 @@ VIZ; $this->assertEquals($expected, $this->graphViz->createScript($graph)); } + public function testGraphIsolatedVerticesWithGroupsWillBeAddedToClusters() + { + $graph = new Graph(); + $graph->createVertex('a')->setGroup(0); + $graph->createVertex('b')->setGroup(1)->setAttribute('graphviz.label', 'second'); + + $expected = <<<VIZ +graph { + subgraph cluster_0 { + label = 0 + "a" + } + subgraph cluster_1 { + label = 1 + "b" [label="second"] + } +} + +VIZ; + + $this->assertEquals($expected, $this->graphViz->createScript($graph)); + } + public function testGraphDefaultAttributes() { $graph = new Graph();
Remove unneeded dependency on graphp/algorithms
graphp_graphviz
train
00dcb4f6e3f23139cc638c3061f97a2e999f27ad
diff --git a/src/handlers/Clipboard.js b/src/handlers/Clipboard.js index <HASH>..<HASH> 100644 --- a/src/handlers/Clipboard.js +++ b/src/handlers/Clipboard.js @@ -74,28 +74,28 @@ function pasteHandler(ev, term) { function rightClickHandler(ev, term) { var s = document.getSelection(), sText = prepareTextForClipboard(s.toString()), - r = s.getRangeAt(0); + clickIsOnSelection = false; - var x = ev.clientX, - y = ev.clientY; + if (s.rangeCount) { + var r = s.getRangeAt(0), + cr = r.getClientRects(), + x = ev.clientX, + y = ev.clientY, + i, rect; - var cr = r.getClientRects(), - clickIsOnSelection = false, - i, rect; - - for (i=0; i<cr.length; i++) { - rect = cr[i]; - clickIsOnSelection = ( - (x > rect.left) && (x < rect.right) && - (y > rect.top) && (y < rect.bottom) - ); - // If we clicked on selection and selection is not a single space, - // then mark the right click as copy-only. We check for the single - // space selection, as this can happen when clicking on an &nbsp; - // and there is not much pointing in copying a single space. - // Single space is char - if (clickIsOnSelection && (sText !== ' ')) { - break; + for (i=0; i<cr.length; i++) { + rect = cr[i]; + clickIsOnSelection = ( + (x > rect.left) && (x < rect.right) && + (y > rect.top) && (y < rect.bottom) + ); + // If we clicked on selection and selection is not a single space, + // then mark the right click as copy-only. We check for the single + // space selection, as this can happen when clicking on an &nbsp; + // and there is not much pointing in copying a single space. + if (clickIsOnSelection && (sText !== ' ')) { + break; + } } }
Consider click not on selection, when s.rangeCount is 0
xtermjs_xterm.js
train
50ba9e1bd06c8ee4795917f8e1407a7fee4dbad6
diff --git a/README.md b/README.md index <HASH>..<HASH> 100644 --- a/README.md +++ b/README.md @@ -88,6 +88,7 @@ five.persian() // پنج five.piglatin() // ivefay five.polish() // pięć five.portuguese() // cinco +five.punjabi() // ਪੰਜ five.romanian() // cinci five.russian() // пять five.serbian() // pet diff --git a/five.js b/five.js index <HASH>..<HASH> 100755 --- a/five.js +++ b/five.js @@ -53,6 +53,7 @@ five.piglatin = function() { return 'ivefay'; }; five.polish = function() { return 'pięć'; }; five.portuguese = function () { return 'cinco'; }; + five.punjabi = function () { return 'ਪੰਜ'; }; five.romanian = function() { return 'cinci'; }; five.russian = function() { return 'пять'; }; five.serbian = function() { return 'pet'; }; diff --git a/test.js b/test.js index <HASH>..<HASH> 100755 --- a/test.js +++ b/test.js @@ -48,6 +48,7 @@ assert.equal('پنج', five.persian(), 'A persian five should be پنج'); assert.equal('ivefay', five.piglatin(), 'A piglatin five should be ivefay'); assert.equal('pięć', five.polish(), 'A polish five should be pięć'); assert.equal('cinco', five.portuguese(), 'A portuguese five should be cinco'); +assert.equal('ਪੰਜ', five.punjabi(), 'A punjabi five should be ਪੰਜ'); assert.equal('cinci', five.romanian(), 'A romanian five should be cinci'); assert.equal('пять', five.russian(), 'A russian five should be пять'); assert.equal('pet', five.serbian(), 'A serbian five should be pet');
Adding Punjabi support (ਪੰਜ)
jackdclark_five
train
3b93f9a68345c05d3914d76049aa8efb44080662
diff --git a/zen-core/src/main/java/com/nominanuda/zen/obj/wrap/WrapperInvocationHandler.java b/zen-core/src/main/java/com/nominanuda/zen/obj/wrap/WrapperInvocationHandler.java index <HASH>..<HASH> 100644 --- a/zen-core/src/main/java/com/nominanuda/zen/obj/wrap/WrapperInvocationHandler.java +++ b/zen-core/src/main/java/com/nominanuda/zen/obj/wrap/WrapperInvocationHandler.java @@ -47,6 +47,10 @@ import com.nominanuda.zen.stereotype.Copyable; import com.nominanuda.zen.stereotype.Value; class WrapperInvocationHandler implements InvocationHandler { + private final static Function<String, String> stringToString = s -> s; + private final static Function<String, Integer> stringToInt = s -> Integer.parseInt(s); + private final static Function<String, Long> stringToLong = s -> Long.parseLong(s); + private final Obj o; private final Set<Method> roleMethods; private final Set<Method> defaultMethods; @@ -137,10 +141,17 @@ class WrapperInvocationHandler implements InvocationHandler { if (type.isInterface()) { type = LinkedHashMap.class; } - Map<String, Object> map = (Map<String, Object>) type.newInstance(); + Map<Object, Object> map = (Map<Object, Object>) type.newInstance(); + Function<String, ?> keyConvertor = stringToString; Class<?> itemType = null; try { Tuple2<Class<?>, Class<?>> keyValTypes = getMapReturnComponentTypes(method); + Class<?> keyType = keyValTypes.get0(); + if (Integer.class.equals(keyType)) { + keyConvertor = stringToInt; + } else if (Long.class.equals(keyType)) { + keyConvertor = stringToLong; + } itemType = keyValTypes.get1(); } catch(Exception e) { // dynamic mode on @@ -148,12 +159,12 @@ class WrapperInvocationHandler implements InvocationHandler { for (String key : obj.keySet()) { Object val = obj.get(key); if (itemType == null && val == null) { - map.put(key, null); + map.put(keyConvertor.apply(key), null); } else { - if(itemType == null) { + if (itemType == null) { itemType = val.getClass(); } - map.put(key, fromObjValue(val, itemType)); + 
map.put(keyConvertor.apply(key), fromObjValue(val, itemType)); } } return map;
getters do parseInt/parseLong in case of Map<Integer,?>, Map<Long,?>
nominanuda_zen-project
train
cc493475c4c6f51b7157062f54c2697ca3e3ca69
diff --git a/Response.php b/Response.php index <HASH>..<HASH> 100644 --- a/Response.php +++ b/Response.php @@ -155,7 +155,15 @@ class Response extends Message } } } - return new Cookie($params); + + $cookie = new Cookie(); + foreach ($params as $name => $value) { + if ($cookie->canSetProperty($name)) { + // Cookie string may contain custom unsupported params + $cookie->$name = $value; + } + } + return $cookie; } /** @@ -167,6 +175,7 @@ class Response extends Message static $nameMap = [ 'expires' => 'expire', 'httponly' => 'httpOnly', + 'max-age' => 'maxAge', ]; $name = strtolower($rawName); if (isset($nameMap[$name])) { diff --git a/tests/ResponseTest.php b/tests/ResponseTest.php index <HASH>..<HASH> 100644 --- a/tests/ResponseTest.php +++ b/tests/ResponseTest.php @@ -142,6 +142,7 @@ class ResponseTest extends TestCase $response = new Response(); $response->setHeaders(['set-cookie' => 'COUNTRY=NA%2C195.177.208.1; expires=Thu, 23-Jul-2015 13:39:41 GMT; path=/; domain=.php.net']); $cookie = $response->getCookies()->get('COUNTRY'); + $this->assertTrue($cookie instanceof Cookie); $response = new Response(); $response->setHeaders(['set-cookie' => [ @@ -149,6 +150,12 @@ class ResponseTest extends TestCase 'name2=value2; path=/; httponly', ]]); $this->assertEquals(2, $response->getCookies()->count()); + + // @see https://github.com/yiisoft/yii2-httpclient/issues/29 + $response = new Response(); + $response->setHeaders(['set-cookie' => 'extraParam=maxAge; path=/; httponly; Max-Age=3600']); + $cookie = $response->getCookies()->get('extraParam'); + $this->assertTrue($cookie instanceof Cookie); } public function testToString()
Added skipping of unsupported params at `Response::parseCookie()`
yiisoft_yii2-httpclient
train
ed5ce442665c69a7fe95f632808f56909269ce1b
diff --git a/lib/geokit/geocoders/openstreetmap.rb b/lib/geokit/geocoders/openstreetmap.rb index <HASH>..<HASH> 100644 --- a/lib/geokit/geocoders/openstreetmap.rb +++ b/lib/geokit/geocoders/openstreetmap.rb @@ -15,7 +15,7 @@ module Geokit address_str = address.is_a?(GeoLoc) ? address.to_geocodeable_s : address - url = "http://nominatim.openstreetmap.org/search?format=json#{options_str}&addressdetails=1&q=#{Geokit::Inflector.url_escape(address_str)}" + url = "https://nominatim.openstreetmap.org/search?format=json#{options_str}&addressdetails=1&q=#{Geokit::Inflector.url_escape(address_str)}" process :json, url end @@ -29,7 +29,7 @@ module Geokit options_str << generate_param_for_option(:osm_type, options) options_str << generate_param_for_option(:osm_id, options) options_str << generate_param_for_option(:json_callback, options) - url = "http://nominatim.openstreetmap.org/reverse?format=json&addressdetails=1#{options_str}" + url = "https://nominatim.openstreetmap.org/reverse?format=json&addressdetails=1#{options_str}" process :json, url end
OSM : moved over to https to avoid unfollowed <I> redirect !
geokit_geokit
train
789d9d8ca15993ce938c0220e5cc93769c80a3ce
diff --git a/fbchat/_file.py b/fbchat/_file.py index <HASH>..<HASH> 100644 --- a/fbchat/_file.py +++ b/fbchat/_file.py @@ -21,6 +21,15 @@ class FileAttachment(Attachment): # Put here for backwards compatibility, so that the init argument order is preserved uid = attr.ib(None) + @classmethod + def _from_graphql(cls, data): + return cls( + url=data.get("url"), + name=data.get("filename"), + is_malicious=data.get("is_malicious"), + uid=data.get("message_file_fbid"), + ) + @attr.s(cmp=False) class AudioAttachment(Attachment): @@ -38,6 +47,15 @@ class AudioAttachment(Attachment): # Put here for backwards compatibility, so that the init argument order is preserved uid = attr.ib(None) + @classmethod + def _from_graphql(cls, data): + return cls( + filename=data.get("filename"), + url=data.get("playable_url"), + duration=data.get("playable_duration_in_ms"), + audio_type=data.get("audio_type"), + ) + @attr.s(cmp=False, init=False) class ImageAttachment(Attachment): @@ -122,6 +140,21 @@ class ImageAttachment(Attachment): self.animated_preview_width = animated_preview.get("width") self.animated_preview_height = animated_preview.get("height") + @classmethod + def _from_graphql(cls, data): + return cls( + original_extension=data.get("original_extension") + or (data["filename"].split("-")[0] if data.get("filename") else None), + width=data.get("original_dimensions", {}).get("width"), + height=data.get("original_dimensions", {}).get("height"), + is_animated=data["__typename"] == "MessageAnimatedImage", + thumbnail_url=data.get("thumbnail", {}).get("uri"), + preview=data.get("preview") or data.get("preview_image"), + large_preview=data.get("large_preview"), + animated_preview=data.get("animated_image"), + uid=data.get("legacy_attachment_id"), + ) + @attr.s(cmp=False, init=False) class VideoAttachment(Attachment): @@ -195,3 +228,16 @@ class VideoAttachment(Attachment): self.large_image_url = large_image.get("uri") self.large_image_width = large_image.get("width") 
self.large_image_height = large_image.get("height") + + @classmethod + def _from_graphql(cls, data): + return cls( + width=data.get("original_dimensions", {}).get("width"), + height=data.get("original_dimensions", {}).get("height"), + duration=data.get("playable_duration_in_ms"), + preview_url=data.get("playable_url"), + small_image=data.get("chat_image"), + medium_image=data.get("inbox_image"), + large_image=data.get("large_image"), + uid=data.get("legacy_attachment_id"), + ) diff --git a/fbchat/_graphql.py b/fbchat/_graphql.py index <HASH>..<HASH> 100644 --- a/fbchat/_graphql.py +++ b/fbchat/_graphql.py @@ -63,43 +63,13 @@ def get_customization_info(thread): def graphql_to_attachment(a): _type = a["__typename"] if _type in ["MessageImage", "MessageAnimatedImage"]: - return ImageAttachment( - original_extension=a.get("original_extension") - or (a["filename"].split("-")[0] if a.get("filename") else None), - width=a.get("original_dimensions", {}).get("width"), - height=a.get("original_dimensions", {}).get("height"), - is_animated=_type == "MessageAnimatedImage", - thumbnail_url=a.get("thumbnail", {}).get("uri"), - preview=a.get("preview") or a.get("preview_image"), - large_preview=a.get("large_preview"), - animated_preview=a.get("animated_image"), - uid=a.get("legacy_attachment_id"), - ) + return ImageAttachment._from_graphql(a) elif _type == "MessageVideo": - return VideoAttachment( - width=a.get("original_dimensions", {}).get("width"), - height=a.get("original_dimensions", {}).get("height"), - duration=a.get("playable_duration_in_ms"), - preview_url=a.get("playable_url"), - small_image=a.get("chat_image"), - medium_image=a.get("inbox_image"), - large_image=a.get("large_image"), - uid=a.get("legacy_attachment_id"), - ) + return VideoAttachment._from_graphql(a) elif _type == "MessageAudio": - return AudioAttachment( - filename=a.get("filename"), - url=a.get("playable_url"), - duration=a.get("playable_duration_in_ms"), - audio_type=a.get("audio_type"), - ) + return 
AudioAttachment._from_graphql(a) elif _type == "MessageFile": - return FileAttachment( - url=a.get("url"), - name=a.get("filename"), - is_malicious=a.get("is_malicious"), - uid=a.get("message_file_fbid"), - ) + return FileAttachment._from_graphql(a) else: return Attachment(uid=a.get("legacy_attachment_id"))
Split graphql_to_attachment into smaller methods
carpedm20_fbchat
train
9012d15889773f0f4ef552e8245f933a24e71d84
diff --git a/userena/decorators.py b/userena/decorators.py index <HASH>..<HASH> 100644 --- a/userena/decorators.py +++ b/userena/decorators.py @@ -23,12 +23,6 @@ def secure_required(view_func): """ def _wrapped_view(request, *args, **kwargs): - if 'HTTP_X_FORWARDED_SSL' in request.META: - request.is_secure = lambda: request.META['HTTP_X_FORWARDED_SSL'] == 'on' - - if 'HTTP_X_FORWARDED_PROTOCOL' in request.META: - request.is_secure = lambda: request.META['HTTP_X_FORWARDED_PROTOCOL'] == 'https' - if not request.is_secure(): if userena_settings.USERENA_USE_HTTPS: request_url = request.build_absolute_uri(request.get_full_path())
Moved hacks from ``secure_required``. SSL should be configured in WSGI.
django-userena-ce_django-userena-ce
train
b7c9c3a114c3ce3789234bdaa40548d1533dc4e4
diff --git a/src/collectors/httpd/httpd.py b/src/collectors/httpd/httpd.py index <HASH>..<HASH> 100644 --- a/src/collectors/httpd/httpd.py +++ b/src/collectors/httpd/httpd.py @@ -27,7 +27,7 @@ class HttpdCollector(diamond.collector.Collector): self.urls = {} for url in self.config['urls']: if ' ' in url: - parts = url.split() + parts = url.split(' ') self.urls[parts[0]] = parts[1] else: self.urls[''] = url
Fix #<I>, this splits the url correctly into parts before processing
python-diamond_Diamond
train
eb5e3fe2791ca3a1b33d3b38b50b6e60f8331e2c
diff --git a/lib/fog/hp/requests/storage/delete_container.rb b/lib/fog/hp/requests/storage/delete_container.rb index <HASH>..<HASH> 100644 --- a/lib/fog/hp/requests/storage/delete_container.rb +++ b/lib/fog/hp/requests/storage/delete_container.rb @@ -25,7 +25,7 @@ module Fog response = Excon::Response.new if self.data[:containers][container_name].nil? response.status = 404 - raise(Excon::Errors.status_error({:expects => 204}, response)) + raise Fog::Storage::HP::NotFound elsif self.data[:containers][container_name] && !self.data[:containers][container_name][:objects].empty? response.status = 409 raise(Excon::Errors.status_error({:expects => 204}, response)) diff --git a/lib/fog/hp/requests/storage/get_container.rb b/lib/fog/hp/requests/storage/get_container.rb index <HASH>..<HASH> 100644 --- a/lib/fog/hp/requests/storage/get_container.rb +++ b/lib/fog/hp/requests/storage/get_container.rb @@ -79,11 +79,10 @@ module Fog 'Content-Type' => container['Content-Type'], 'Content-Length' => container['Content-Length'] } + response else - response.status = 404 - raise(Excon::Errors.status_error({:expects => 200}, response)) + raise Fog::Storage::HP::NotFound end - response end end
Raise the correct exception in the mock.
fog_fog
train
ba3837261430abcdc909461ec3ec5e156801ea50
diff --git a/src/com/cctintl/c3dfx/skins/C3DTextFieldSkin.java b/src/com/cctintl/c3dfx/skins/C3DTextFieldSkin.java index <HASH>..<HASH> 100644 --- a/src/com/cctintl/c3dfx/skins/C3DTextFieldSkin.java +++ b/src/com/cctintl/c3dfx/skins/C3DTextFieldSkin.java @@ -208,9 +208,10 @@ public class C3DTextFieldSkin extends TextFieldSkin{ super.layoutChildren(x, y, w, h); if(invalid){ - + textPane = ((Pane)this.getChildren().get(0)); - + textPane.prefWidthProperty().bind(getSkinnable().prefWidthProperty()); + line.setStartX(0); line.endXProperty().bind(textPane.widthProperty()); line.startYProperty().bind(textPane.heightProperty()); @@ -232,7 +233,6 @@ public class C3DTextFieldSkin extends TextFieldSkin{ mid = (endX - startX )/2; focusedLine.setStartX(mid); focusedLine.setEndX(mid); - System.out.println(startX); }); startX = 0;
fixed a bug related to text field width using the new layout
jfoenixadmin_JFoenix
train
1b69003a4b4fbb676d6b739c24f06a83bff98ada
diff --git a/EventListener/OrderListener.php b/EventListener/OrderListener.php index <HASH>..<HASH> 100644 --- a/EventListener/OrderListener.php +++ b/EventListener/OrderListener.php @@ -79,7 +79,16 @@ class OrderListener implements EventSubscriberInterface public function onKernelTerminate(PostResponseEvent $event) { - $this->getOrderUpdater()->processIds(); + $requestMethod = $event->getRequest()->getMethod(); + $match = false; + $requestUri = $event->getRequest()->getRequestUri(); + if (strpos($requestUri, '/api/orders')) { + $match = true; + } + + if ($match && $requestMethod == 'PUT' || $requestMethod == 'POST') { + $this->getOrderUpdater()->processIds(); + } } /**
Update total price just on put and post orders
sulu_SuluSalesOrderBundle
train
d13d3f2d317946d7318893298609d2efb3c84ccf
diff --git a/lib/tests/environment_test.php b/lib/tests/environment_test.php index <HASH>..<HASH> 100644 --- a/lib/tests/environment_test.php +++ b/lib/tests/environment_test.php @@ -39,7 +39,7 @@ class environment_testcase extends advanced_testcase { $this->assertNotEmpty($envstatus); foreach ($environment_results as $environment_result) { - $this->assertTrue($environment_result->getStatus(), "Problem detected in environment ($environment_result->part:$environment_result->part), fix all warnings and errors!"); + $this->assertTrue($environment_result->getStatus(), "Problem detected in environment ($environment_result->part:$environment_result->info), fix all warnings and errors!"); } } }
MDL-<I> Show env. info instead of part twice.
moodle_moodle
train
560fecc5c8240f9b9ebc44987f10d21bd5db7487
diff --git a/tests.py b/tests.py index <HASH>..<HASH> 100644 --- a/tests.py +++ b/tests.py @@ -40,8 +40,12 @@ import ssl import time import unittest -from AdvancedHTTPServer import * +from AdvancedHTTPServer import AdvancedHTTPServerRegisterPath +from AdvancedHTTPServer import AdvancedHTTPServerRPCClient +from AdvancedHTTPServer import AdvancedHTTPServerRPCClientCached +from AdvancedHTTPServer import AdvancedHTTPServerRPCError from AdvancedHTTPServer import AdvancedHTTPServerSerializer +from AdvancedHTTPServer import AdvancedHTTPServerTestCase from AdvancedHTTPServer import build_serializer_from_content_type from AdvancedHTTPServer import has_msgpack from AdvancedHTTPServer import random_string
Fix import statements in tests to avoid wildcards
zeroSteiner_AdvancedHTTPServer
train
b4aaeb65e1673e9f80c39156069132f4d232f3c0
diff --git a/library/Garp/Functional/Filter.php b/library/Garp/Functional/Filter.php index <HASH>..<HASH> 100644 --- a/library/Garp/Functional/Filter.php +++ b/library/Garp/Functional/Filter.php @@ -15,6 +15,9 @@ namespace Garp\Functional; * @return mixed */ function filter($fn, $collection = null) { + if (!is_callable($fn)) { + throw new \InvalidArgumentException('filter expects the first argument to be callable'); + } $filterer = function ($collection) use ($fn) { if (is_array($collection)) { $numericKeys = array_filter(array_keys($collection), 'is_numeric'); diff --git a/library/Garp/Functional/PropEquals.php b/library/Garp/Functional/PropEquals.php index <HASH>..<HASH> 100644 --- a/library/Garp/Functional/PropEquals.php +++ b/library/Garp/Functional/PropEquals.php @@ -14,9 +14,15 @@ namespace Garp\Functional; * @param mixed $obj * @return bool */ -function prop_equals($prop, $value, $obj = null) { - $checker = function ($obj) use ($prop, $value) { - return prop($prop, $obj) === $value; +function prop_equals($prop, $value = null, $obj = null) { + if (func_num_args() === 1) { + return partial('Garp\Functional\prop_equals', $prop); + } + $checker = function ($value, $obj = null) use ($prop) { + $checker2 = function ($obj) use ($prop, $value) { + return prop($prop, $obj) === $value; + }; + return func_num_args() < 2 ? $checker2 : $checker2($obj); }; - return func_num_args() < 3 ? $checker : $checker($obj); + return func_num_args() < 3 ? 
$checker($value) : $checker($value, $obj); } diff --git a/tests/PropEqualsTest.php b/tests/PropEqualsTest.php index <HASH>..<HASH> 100644 --- a/tests/PropEqualsTest.php +++ b/tests/PropEqualsTest.php @@ -58,5 +58,35 @@ class PropEqualsTest extends TestCase { ); } + public function test_should_be_thrice_curried() { + $musicians = array( + array('first_name' => 'Miles', 'last_name' => 'Davis', 'instrument' => 'trumpet'), + array('first_name' => 'John', 'last_name' => 'Coltrane', 'instrument' => 'saxophone'), + array('first_name' => 'Louis', 'last_name' => 'Armstrong', 'instrument' => 'trumpet'), + array('first_name' => 'Thelonious', 'last_name' => 'Monk', 'instrument' => 'piano'), + array('first_name' => 'Charlie', 'last_name' => 'Parker', 'instrument' => 'saxophone') + ); + $plays = f\prop_equals('instrument'); + $this->assertTrue(is_callable($plays)); + $this->assertTrue(is_callable($plays('saxophone'))); + + $saxOrPianoPlayers = f\filter( + f\either($plays('saxophone'), $plays('piano')), + $musicians + ); + $this->assertEquals( + array( + array( + 'first_name' => 'John', 'last_name' => 'Coltrane', 'instrument' => 'saxophone' + ), + array('first_name' => 'Thelonious', 'last_name' => 'Monk', 'instrument' => 'piano'), + array( + 'first_name' => 'Charlie', 'last_name' => 'Parker', 'instrument' => 'saxophone' + ) + ), + $saxOrPianoPlayers + ); + } + }
Made prop_equals thrice curried
grrr-amsterdam_garp-functional
train
af7b650b7cf0620623c6503f39125ea714bb024f
diff --git a/tests/test_component/test_stage.py b/tests/test_component/test_stage.py index <HASH>..<HASH> 100644 --- a/tests/test_component/test_stage.py +++ b/tests/test_component/test_stage.py @@ -24,7 +24,7 @@ def test_stage_initialization(): s = Stage() - assert s.uid == None + assert s.uid == 'stage.0000' assert s.name == None assert s.tasks == set() assert s.state == states.INITIAL
early uid assignment changes test in stage
radical-cybertools_radical.entk
train
4037a5365f109f1f0a4406f0a32c8a387410bbc8
diff --git a/molgenis-data-rest/src/main/java/org/molgenis/data/rest/v2/AttributeResponseV2.java b/molgenis-data-rest/src/main/java/org/molgenis/data/rest/v2/AttributeResponseV2.java index <HASH>..<HASH> 100644 --- a/molgenis-data-rest/src/main/java/org/molgenis/data/rest/v2/AttributeResponseV2.java +++ b/molgenis-data-rest/src/main/java/org/molgenis/data/rest/v2/AttributeResponseV2.java @@ -49,8 +49,7 @@ class AttributeResponseV2 * Constructs AttributeResponseV2 using params * * @param fetch set of lowercase attribute names to include in response - * @param includeCategories if set to true fetches options list for CATEGORICAL and CATEGORICAL_MREF types, - * if set to false references to the entities are returned + * @param includeCategories if set to true includes options list for CATEGORICAL and CATEGORICAL_MREF types in the attribute metadata */ public AttributeResponseV2(final String entityParentName, EntityType entityType, Attribute attr, Fetch fetch, UserPermissionEvaluator permissionService, DataService dataService, boolean includeCategories) @@ -142,7 +141,7 @@ class AttributeResponseV2 } /** - * Default AttributeResponseV2 with @param isCategoricalGetEager set to false + * Default AttributeResponseV2 with @param includeCategories set to false * * @param fetch set of lowercase attribute names to include in response */ diff --git a/molgenis-data-rest/src/main/java/org/molgenis/data/rest/v2/RestControllerV2.java b/molgenis-data-rest/src/main/java/org/molgenis/data/rest/v2/RestControllerV2.java index <HASH>..<HASH> 100644 --- a/molgenis-data-rest/src/main/java/org/molgenis/data/rest/v2/RestControllerV2.java +++ b/molgenis-data-rest/src/main/java/org/molgenis/data/rest/v2/RestControllerV2.java @@ -638,7 +638,7 @@ public class RestControllerV2 } private EntityCollectionResponseV2 createEntityCollectionResponse(String entityTypeId, - EntityCollectionRequestV2 request, HttpServletRequest httpRequest, boolean includeLookup) + EntityCollectionRequestV2 request, 
HttpServletRequest httpRequest, boolean includeCategories) { EntityType meta = dataService.getEntityType(entityTypeId); @@ -709,7 +709,7 @@ public class RestControllerV2 } return new EntityCollectionResponseV2(pager, entities, fetch, BASE_URI + '/' + entityTypeId, meta, - permissionService, dataService, prevHref, nextHref, includeLookup); + permissionService, dataService, prevHref, nextHref, includeCategories); } } @@ -733,20 +733,20 @@ public class RestControllerV2 return createEntityResponse(entity, fetch, includeMetaData, false); } - private Map<String, Object> createEntityResponse(Entity entity, Fetch fetch, boolean includeMetaData, boolean includeLookup) + private Map<String, Object> createEntityResponse(Entity entity, Fetch fetch, boolean includeMetaData, boolean includeCategories) { Map<String, Object> responseData = new LinkedHashMap<>(); if (includeMetaData) { - createEntityTypeResponse(entity.getEntityType(), fetch, responseData, includeLookup); + createEntityTypeResponse(entity.getEntityType(), fetch, responseData, includeCategories); } createEntityValuesResponse(entity, fetch, responseData); return responseData; } - private void createEntityTypeResponse(EntityType entityType, Fetch fetch, Map<String, Object> responseData, boolean includeLookup) + private void createEntityTypeResponse(EntityType entityType, Fetch fetch, Map<String, Object> responseData, boolean includeCategories) { - responseData.put("_meta", new EntityTypeResponseV2(entityType, fetch, permissionService, dataService, includeLookup)); + responseData.put("_meta", new EntityTypeResponseV2(entityType, fetch, permissionService, dataService, includeCategories)); } private void createEntityValuesResponse(Entity entity, Fetch fetch, Map<String, Object> responseData)
process review comments - update javadoc - fix param names
molgenis_molgenis
train
fd3e10eda55abafce517744b92b13cf1214212ba
diff --git a/Table/Sqlite.php b/Table/Sqlite.php index <HASH>..<HASH> 100644 --- a/Table/Sqlite.php +++ b/Table/Sqlite.php @@ -168,8 +168,11 @@ class Nada_Table_Sqlite extends Nada_Table // Release savepoint $this->_database->exec('RELEASE ' . __FUNCTION__); - // Update column cache - $this->_columns = $newColumns; + // Update column cache (update keys as well in case of renamed columns) + $this->_columns = array(); + foreach ($newColumns as $column) { + $this->_columns[$column->getName()] = $column; + } $this->_primaryKey = array(); foreach ($pkColumns as $pkColumnName) { $this->_primaryKey[] = $this->_columns[$pkColumnName];
Fixed incorret keys after renaming a column.
hschletz_NADA
train
e4a048679e7bbcc07af1fc521ff6db8e50175e3e
diff --git a/dao/elasticsearch/filesystem.go b/dao/elasticsearch/filesystem.go index <HASH>..<HASH> 100644 --- a/dao/elasticsearch/filesystem.go +++ b/dao/elasticsearch/filesystem.go @@ -24,6 +24,7 @@ import ( model "github.com/control-center/serviced/dao" "github.com/control-center/serviced/datastore" + "github.com/control-center/serviced/volume" "github.com/zenoss/glog" ) @@ -311,19 +312,27 @@ func (dao *ControlPlaneDao) ListSnapshots(serviceID string, snapshots *[]model.S return err } for _, snapshotID := range snapshotIDs { + var newInfo model.SnapshotInfo + info, err := dao.facade.GetSnapshotInfo(ctx, snapshotID) - if err != nil { - return err - } + if err == volume.ErrInvalidSnapshot { + newInfo = model.SnapshotInfo{ + SnapshotID: snapshotID, + Invalid: true, + } - newInfo := model.SnapshotInfo{ - SnapshotID: info.Name, - TenantID: info.TenantID, - Description: info.Message, - Tags: info.Tags, - Created: info.Created, + } else if err != nil { + return err + } else { + newInfo = model.SnapshotInfo{ + SnapshotID: info.Name, + TenantID: info.TenantID, + Description: info.Message, + Tags: info.Tags, + Created: info.Created, + Invalid: false, + } } - *snapshots = append(*snapshots, newInfo) } return diff --git a/dao/model.go b/dao/model.go index <HASH>..<HASH> 100644 --- a/dao/model.go +++ b/dao/model.go @@ -126,13 +126,19 @@ type SnapshotInfo struct { Description string Tags []string Created time.Time + Invalid bool } func (s SnapshotInfo) String() string { + snapshotID := s.SnapshotID + if s.Invalid { + snapshotID += " [INVALID]" + } + if s.Description == "" { - return s.SnapshotID + return snapshotID } else { - return s.SnapshotID + " " + s.Description + return snapshotID + " " + s.Description } }
Add Invalid flag to dao.SnapshotInfo, and modified dao to apply the flag to invalid snapshots when listing them.
control-center_serviced
train
159748c9669efc430ee1a3e99d1585641d9c4a6b
diff --git a/Neos.RedirectHandler.DatabaseStorage/Classes/RedirectStorage.php b/Neos.RedirectHandler.DatabaseStorage/Classes/RedirectStorage.php index <HASH>..<HASH> 100644 --- a/Neos.RedirectHandler.DatabaseStorage/Classes/RedirectStorage.php +++ b/Neos.RedirectHandler.DatabaseStorage/Classes/RedirectStorage.php @@ -51,13 +51,6 @@ class RedirectStorage implements RedirectStorageInterface protected $routerCachingService; /** - * Runtime cache to avoid creating multiple time the same redirect - * - * @var array - */ - protected $runtimeCache = []; - - /** * @Flow\InjectConfiguration(path="statusCode", package="Neos.RedirectHandler") * @var array */ @@ -171,15 +164,10 @@ class RedirectStorage implements RedirectStorageInterface */ protected function addRedirectionByHost($sourceUriPath, $targetUriPath, $statusCode, $host = null) { - $hash = md5($host . $sourceUriPath . $targetUriPath . $statusCode); - if (isset($this->runtimeCache[$hash])) { - return $this->runtimeCache[$hash]; - } $redirect = new Redirect($sourceUriPath, $targetUriPath, $statusCode, $host); $this->updateDependingRedirects($redirect); $this->redirectRepository->add($redirect); $this->routerCachingService->flushCachesForUriPath($sourceUriPath); - $this->runtimeCache[$hash] = $redirect; return RedirectDto::create($redirect); }
TASK: Remove RuntimeCache in RedirectStorage
neos_flow-development-collection
train
086c1c5ca52b0ed8b810ad5a293a574ba990e635
diff --git a/sos/cleaner/mappings/ip_map.py b/sos/cleaner/mappings/ip_map.py index <HASH>..<HASH> 100644 --- a/sos/cleaner/mappings/ip_map.py +++ b/sos/cleaner/mappings/ip_map.py @@ -121,13 +121,12 @@ class SoSIPMap(SoSMap): # network and if it has, replace the default /32 netmask that # ipaddress applies to no CIDR-notated addresses self.set_ip_cidr_from_existing_subnet(addr) - return self.sanitize_ipaddr(addr) else: # we have a CIDR notation, so generate an obfuscated network # address and then generate an IP address within that network's # range self.sanitize_network(network) - return self.sanitize_ipaddr(addr) + return self.sanitize_ipaddr(addr) def sanitize_network(self, network): """Obfuscate the network address provided, and if there are host bits
[cleaner] more streamlined sanitize_item method Remove a duplicate call in both IF branches. Resolves: #<I>
sosreport_sos
train
094b7030b6ca1a75fb75e81e2d43e4fb723aa81c
diff --git a/cellpy/readers/instruments/arbin_sql.py b/cellpy/readers/instruments/arbin_sql.py index <HASH>..<HASH> 100644 --- a/cellpy/readers/instruments/arbin_sql.py +++ b/cellpy/readers/instruments/arbin_sql.py @@ -34,7 +34,11 @@ DEBUG_MODE = prms.Reader.diagnostics # not used ALLOW_MULTI_TEST_FILE = prms._allow_multi_test_file # not used ODBC = prms._odbc SEARCH_FOR_ODBC_DRIVERS = prms._search_for_odbc_driver # not used -SERVER = prms.Instruments.Arbin["SQL_server"] +SQL_SERVER = prms.Instruments.Arbin["SQL_server"] +SQL_UID = prms.Instruments.Arbin["SQL_UID"] +SQL_PWD = prms.Instruments.Arbin["SQL_PWD"] +SQL_DRIVER = prms.Instruments.Arbin["SQL_Driver"] + # Names of the tables in the SQL Server db that is used by cellpy @@ -137,7 +141,7 @@ class ArbinSQLLoader(Loader): self.arbin_headers_aux_global = self.get_headers_aux_global() self.arbin_headers_aux = self.get_headers_aux() self.current_chunk = 0 # use this to set chunks to load - self.server = SERVER + self.server = SQL_SERVER @staticmethod def get_headers_normal(): @@ -252,7 +256,7 @@ class ArbinSQLLoader(Loader): # selecting only one value (might implement multi-channel/id use later) test_id = data_df["Test_ID"].iloc[0] - id_name = f"{SERVER}:{name}:{test_id}" + id_name = f"{SQL_SERVER}:{name}:{test_id}" channel_id = data_df["Channel_ID"].iloc[0] @@ -330,9 +334,9 @@ class ArbinSQLLoader(Loader): hdr_data_point = self.cellpy_headers_normal.data_point_txt if data.raw.index.name != hdr_data_point: data.raw = data.raw.set_index(hdr_data_point, drop=False) - + hdr_date_time = self.arbin_headers_normal.datetime_txt - data.start_datetime = parse(data.raw[hdr_date_time].iat[0]) + data.start_datetime = parse("20"+ data.raw[hdr_date_time].iat[0][:-7]) return data @@ -340,14 +344,14 @@ class ArbinSQLLoader(Loader): # TODO: refactor and include optional SQL arguments name_str = f"('{name}', '')" con_str = ( - "Driver={SQL Server};Server=" + self.server + ";Trusted_Connection=yes;" + "Driver={" + SQL_DRIVER + "}" + 
f";Server={SQL_SERVER};UID={SQL_UID};PWD={SQL_PWD};Trusted_Connection=yes;" ) master_q = ( "SELECT Database_Name, Test_Name FROM " "ArbinPro8MasterInfo.dbo.TestList_Table WHERE " f"ArbinPro8MasterInfo.dbo.TestList_Table.Test_Name IN {name_str}" ) - + print("Connecting with string" + con_str) conn = pyodbc.connect(con_str) sql_query = pd.read_sql_query(master_q, conn) @@ -448,7 +452,7 @@ def test_query(): import pathlib name = ["20201106_HC03B1W_1_cc_01"] - dd, ds = test_sql_loader(SERVER, name) + dd, ds = test_sql_loader(SQL_SERVER, name) out = pathlib.Path(r"C:\scripts\notebooks\Div") dd.to_clipboard() input("x")
Adding support for SQL userparams
jepegit_cellpy
train
8856ad127e4b7383926cf46713fdd373cdcfc530
diff --git a/README.md b/README.md index <HASH>..<HASH> 100644 --- a/README.md +++ b/README.md @@ -172,7 +172,7 @@ fm_elfinder: * **assets_path** - this is where css/js files of the bundle are, this options should be the same as composers `component-dir` option. * **default** - instance of elfinder, can be used to define multiple configurations of ElFinder, allows simultaneous configuration for different types of WYSIWYG editors in your project * **path** - define root directory for the files inside web/ directory, default is "uploads". Make sure to set proper write/read and owner permissions to this directory. -* **url** - url to be prefixed to image path, for displaying. Can be either `absolute` or `relative`. If relative, it will be prefixed with the applications base-url. If left blank, url will be the base-url, append with the value of the 'path' parameter +* **url** - url to be prefixed to image path, for displaying. Can be either `absolute` or `relative`. If absolute, you can use `{homeFolder}` string as placeholder which will be replaced automatically. If relative, it will be prefixed with the applications base-url. 
If left blank, url will be the base-url, append with the value of the 'path' parameter * **driver** - can be LocalFileSystem, FTP or MySQL, Flysystem, S3 and etc, check class FM\ElfinderBundle\DependencyInjection\Configuration * **locale** - locale determines, which language, ElFinder will use, to translate user interface, default is current request locale * **cors_support** - allows cross domain responses handling (default false) diff --git a/src/Configuration/ElFinderConfigurationReader.php b/src/Configuration/ElFinderConfigurationReader.php index <HASH>..<HASH> 100644 --- a/src/Configuration/ElFinderConfigurationReader.php +++ b/src/Configuration/ElFinderConfigurationReader.php @@ -178,7 +178,7 @@ class ElFinderConfigurationReader implements ElFinderConfigurationProviderInterf { if (isset($parameter['url']) && $parameter['url']) { if (0 === strpos($parameter['url'], 'http')) { - return $parameter['url']; + return str_replace('{homeFolder}', $homeFolder, $parameter['url']); } $path = $parameter['url'].'/'.$homeFolder; diff --git a/tests/Configuration/ElFinderConfigurationReaderTest.php b/tests/Configuration/ElFinderConfigurationReaderTest.php index <HASH>..<HASH> 100644 --- a/tests/Configuration/ElFinderConfigurationReaderTest.php +++ b/tests/Configuration/ElFinderConfigurationReaderTest.php @@ -219,6 +219,58 @@ class ElFinderConfigurationReaderTest extends \PHPUnit\Framework\TestCase ), ), ), + 'without_path_with_url_absolute_homeFolder' => array( + 'cors_support' => true, + 'connector' => array( + 'debug' => '', 'binds' => '', 'plugins' => '', + 'roots' => array( + 'uploads' => array( + 'flysystem' => array('enabled' => false), + 'volume_id' => 2, + 'security_voter' => '', + 'show_hidden' => false, + 'path' => '', + 'driver' => 'LocalFileSystem', + 'url' => 'https://test.com/{homeFolder}', + 'glide_url' => '', + 'glide_key' => '', + 'plugins' => '', + 'driver_options' => '', + 'start_path' => '', + 'encoding' => '', + 'alias' => '', + 'mime_detect' => '', + 
'mimefile' => '', + 'img_lib' => '', + 'tmb_path' => '', + 'tmb_path_mode' => '', + 'tmb_url' => '', + 'tmb_size' => '', + 'tmb_crop' => '', + 'tmb_bg_color' => '', + 'copy_overwrite' => '', + 'copy_join' => '', + 'copy_from' => '', + 'copy_to' => '', + 'upload_overwrite' => '', + 'upload_allow' => '', + 'upload_deny' => '', + 'upload_max_size' => '', + 'defaults' => '', + 'attributes' => '', + 'accepted_name' => '', + 'disabled_commands' => '', + 'tree_deep' => '', + 'check_subfolders' => '', + 'separator' => '', + 'time_format' => '', + 'archive_mimes' => '', + 'archivers' => '', + 'fileMode' => '', + ), + ), + ), + ), ), ); @@ -307,6 +359,12 @@ class ElFinderConfigurationReaderTest extends \PHPUnit\Framework\TestCase $configuration = $reader->getConfiguration('without_path_with_url'); $this->assertEquals('/bob', $configuration['roots'][0]['path']); $this->assertEquals('http://test.com/unit-test/home-url-without-path/bob', $configuration['roots'][0]['URL']); + + // without path and with url absolute and homeFolder + $reader = $this->getConfigurationReader($this->getHomeFolderAwareAttributesObject()); + $configuration = $reader->getConfiguration('without_path_with_url_absolute_homeFolder'); + $this->assertEquals('/bob', $configuration['roots'][0]['path']); + $this->assertEquals('https://test.com/bob', $configuration['roots'][0]['URL']); } public function testAccessTmbURLOption()
Add homeFolder to url (#<I>)
helios-ag_FMElfinderBundle
train
cff9ae234ddfd2ff2f1663b7b1618059ce9950ff
diff --git a/src/tinymce.js b/src/tinymce.js index <HASH>..<HASH> 100644 --- a/src/tinymce.js +++ b/src/tinymce.js @@ -9,12 +9,18 @@ angular.module('ui.tinymce', []) return { require: 'ngModel', link: function (scope, elm, attrs, ngModel) { - var expression, options, tinyInstance; + var expression, options, tinyInstance, + updateView = function () { + ngModel.$setViewValue(elm.val()); + if (!scope.$$phase) { + scope.$apply(); + } + }; // generate an ID if not present if (!attrs.id) { attrs.$set('id', 'uiTinymce' + generatedIds++); } - + if (attrs.uiTinymce) { expression = scope.$eval(attrs.uiTinymce); } else { @@ -30,18 +36,17 @@ angular.module('ui.tinymce', []) // Update model on button click ed.on('ExecCommand', function (e) { ed.save(); - ngModel.$setViewValue(elm.val()); - if (!scope.$$phase) { - scope.$apply(); - } + updateView(); }); // Update model on keypress ed.on('KeyUp', function (e) { ed.save(); - ngModel.$setViewValue(elm.val()); - if (!scope.$$phase) { - scope.$apply(); - } + updateView(); + }); + // Update model on change, i.e. copy/pasted text, plugins altering content + ed.on('SetContent', function (e) { + ed.save(); + updateView(); }); if (expression.setup) { scope.$eval(expression.setup);
Added ability of directive to update model on more general content changes
angular-ui_ui-tinymce
train
8d30672815109f981acb866e12aea2eeec671c58
diff --git a/openpnm/core/Base.py b/openpnm/core/Base.py index <HASH>..<HASH> 100644 --- a/openpnm/core/Base.py +++ b/openpnm/core/Base.py @@ -1201,7 +1201,8 @@ class Base(dict): temp_arr[inds] = np.nan # Lastly, convert to correct data type - if None in arrs: # If one subdomain does not have array... + found = any([True for a in arrs if a is None]) + if found: # If one subdomain does not have array... t = [a.dtype for a in arrs if a is not None] if len(set(t)) == 1: # If existing arrays are same type if t[0] == bool: # If type is bool, put False for nans
Fixing annoying numpy None comparison
PMEAL_OpenPNM
train
87549909970c0a5b66ecdb2eb95f474f61386413
diff --git a/fontbakery-check-ttf.py b/fontbakery-check-ttf.py index <HASH>..<HASH> 100755 --- a/fontbakery-check-ttf.py +++ b/fontbakery-check-ttf.py @@ -476,8 +476,10 @@ def main(): for dirname, family in metadata_to_check: ttf = {} for f in family.fonts: - #logging.error("dirname: '{}' f.filename: '{}'".format(dirname, f.filename)) - ttf[f.filename] = ttLib.TTFont(os.path.join(dirname, f.filename)) + if f.filename in ttf.keys(): + logging.error("This is a fontbakery bug. Please contact us. We may need to figure out a better hash-function for the font ProtocolBuffer message...") + else: + ttf[f.filename] = ttLib.TTFont(os.path.join(dirname, f.filename)) #----------------------------------------------------- logging.debug("The same number of glyphs across family?")
we may need to find a better hash-function for the Font metadata ProtocolBuffer message... At the moment I'm using the filename of a font entry as its hash. But that may collide at some point if we're unlucky.
googlefonts_fontbakery
train
97cdc5762f8c8227935bfca5e999e2cda8333dbf
diff --git a/lib/server.spec.js b/lib/server.spec.js index <HASH>..<HASH> 100644 --- a/lib/server.spec.js +++ b/lib/server.spec.js @@ -14,6 +14,8 @@ const { createTestServer } = require('./in-process-server-test-helpers') describe('The server', function() { let server, baseUrl before('Start the server', async function() { + // Fixes https://github.com/badges/shields/issues/2611 + this.timeout(10000) const port = await portfinder.getPortPromise() server = createTestServer({ port }) baseUrl = server.baseUrl
Add timeout for server test setup/teardown (#<I>)
badges_shields
train
b85d21b8fe9ebef52cf2b11d95edf758de1e4400
diff --git a/moto/s3/models.py b/moto/s3/models.py index <HASH>..<HASH> 100644 --- a/moto/s3/models.py +++ b/moto/s3/models.py @@ -87,10 +87,13 @@ class FakeKey(BaseModel): new_value = new_value.encode(DEFAULT_TEXT_ENCODING) self._value_buffer.write(new_value) - def copy(self, new_name=None): + def copy(self, new_name=None, new_is_versioned=None): r = copy.deepcopy(self) if new_name is not None: r.name = new_name + if new_is_versioned is not None: + r._is_versioned = new_is_versioned + r.refresh_version() return r def set_metadata(self, metadata, replace=False): @@ -973,17 +976,15 @@ class S3Backend(BaseBackend): dest_bucket = self.get_bucket(dest_bucket_name) key = self.get_key(src_bucket_name, src_key_name, version_id=src_version_id) - if dest_key_name != src_key_name: - key = key.copy(dest_key_name) - dest_bucket.keys[dest_key_name] = key - # By this point, the destination key must exist, or KeyError - if dest_bucket.is_versioned: - dest_bucket.keys[dest_key_name].refresh_version() + new_key = key.copy(dest_key_name, dest_bucket.is_versioned) + if storage is not None: - key.set_storage_class(storage) + new_key.set_storage_class(storage) if acl is not None: - key.set_acl(acl) + new_key.set_acl(acl) + + dest_bucket.keys[dest_key_name] = new_key def set_bucket_acl(self, bucket_name, acl): bucket = self.get_bucket(bucket_name) diff --git a/tests/test_s3/test_s3.py b/tests/test_s3/test_s3.py index <HASH>..<HASH> 100644 --- a/tests/test_s3/test_s3.py +++ b/tests/test_s3/test_s3.py @@ -1531,6 +1531,23 @@ def test_boto3_copy_object_with_versioning(): @mock_s3 +def test_boto3_copy_object_from_unversioned_to_versioned_bucket(): + client = boto3.client('s3', region_name='us-east-1') + + client.create_bucket(Bucket='src', CreateBucketConfiguration={'LocationConstraint': 'eu-west-1'}) + client.create_bucket(Bucket='dest', CreateBucketConfiguration={'LocationConstraint': 'eu-west-1'}) + client.put_bucket_versioning(Bucket='dest', VersioningConfiguration={'Status': 'Enabled'}) 
+ + client.put_object(Bucket='src', Key='test', Body=b'content') + + obj2_version_new = client.copy_object(CopySource={'Bucket': 'src', 'Key': 'test'}, Bucket='dest', Key='test') \ + .get('VersionId') + + # VersionId should be present in the response + obj2_version_new.should_not.equal(None) + + +@mock_s3 def test_boto3_deleted_versionings_list(): client = boto3.client('s3', region_name='us-east-1')
Fixed copy-object from unversioned bucket to versioned bucket The response of the copy-object operation was missing VersionId property when source bucket is not versioned.
spulec_moto
train
817b1c6fc6c9577a9133c9f191cba28f05a8cc0e
diff --git a/lib/grit_adapter.rb b/lib/grit_adapter.rb index <HASH>..<HASH> 100644 --- a/lib/grit_adapter.rb +++ b/lib/grit_adapter.rb @@ -25,7 +25,7 @@ module DTK end end - attr_reader :branch + attr_reader :branch,:repo_dir def self.clone(target_repo_dir,git_server_url,opts={}) if File.directory?(target_repo_dir) diff --git a/lib/grit_adapter/file_access.rb b/lib/grit_adapter/file_access.rb index <HASH>..<HASH> 100644 --- a/lib/grit_adapter/file_access.rb +++ b/lib/grit_adapter/file_access.rb @@ -1,4 +1,4 @@ -module DTK::Common; class GritAdapter +module DTK; module Common; class GritAdapter class FileAccess < self require File.expand_path('file_access/status', File.dirname(__FILE__)) require File.expand_path('file_access/diff', File.dirname(__FILE__)) @@ -27,9 +27,9 @@ module DTK::Common; class GritAdapter end end - def fetch_branch(remote="origin") + def fetch(remote="origin") chdir_and_checkout do - git_command(:fetch,remote,@branch) + git_command(:fetch,remote) end end @@ -47,6 +47,40 @@ module DTK::Common; class GritAdapter :email => "dtk@reactor8.com" } + #returns :equal, :local_behind, :local_ahead, or :branchpoint + #type can be :remote_branch or :local_branch + def ret_merge_relationship(type,ref,opts={}) + if (type == :remote_branch and opts[:fetch_if_needed]) + #TODO: this fetches all branches on the remote; see if anyway to just fetch a specfic branch + #ref will be of form remote_name/branch + fetch(ref.split("/").first) + end + other_grit_ref = + case type + when :remote_branch + @grit_repo.remotes.find{|r|r.name == ref} + when :local_branch + @grit_repo.heads.find{|r|r.name == ref} + else + raise Error.new("Illegal type parameter (#{type}) passed to ret_merge_relationship") + end + unless other_grit_ref + raise Error.new("Cannot find git ref (#{ref})") + end + + other_sha = other_grit_ref.commit.id + local_sha = @grit_repo.heads.find{|r|r.name == @branch}.commit.id + + if other_sha == local_sha then :equal + else + merge_sha = 
git_command(:merge_base,@branch,ref) + if merge_sha == local_sha then :local_behind + elsif merge_sha == other_sha then :local_ahead + else :branchpoint + end + end + end + private def qualified_path(file_rel_path) "#{@repo_dir}/#{file_rel_path}" @@ -67,5 +101,5 @@ module DTK::Common; class GritAdapter end end end -end;end +end;end;end diff --git a/lib/grit_adapter/object_access.rb b/lib/grit_adapter/object_access.rb index <HASH>..<HASH> 100644 --- a/lib/grit_adapter/object_access.rb +++ b/lib/grit_adapter/object_access.rb @@ -1,6 +1,6 @@ module DTK::Common; class GritAdapter class ObjectAccess < self - def initialize(repo_dir,branch='master') + def initialize(repo_dir,branch=nil) super @grit_index = @grit_repo.index end
updates for dtk-client 'push from clone'
rich-dtk_dtk-common
train
b1c6a2be567d3e17e1ba7cf1c12eb6c524d91afa
diff --git a/tests/test_phase_change.py b/tests/test_phase_change.py index <HASH>..<HASH> 100644 --- a/tests/test_phase_change.py +++ b/tests/test_phase_change.py @@ -33,8 +33,6 @@ from thermo.phase_change import COOLPROP, VDI_PPDS, CLAPEYRON, LIU, ALIBAKHSHI, def test_EnthalpyVaporization(): EtOH = EnthalpyVaporization(Tb=351.39, Tc=514.0, Pc=6137000.0, omega=0.635, similarity_variable=0.1954, Psat=7872.2, Zg=0.9633, Zl=0.0024, CASRN='64-17-5') - EtOH.method = COOLPROP - assert_close(EtOH.T_dependent_property(305), 42062.9371631488) EtOH.method = VDI_PPDS assert_close(EtOH.T_dependent_property(305), 42099.23631527565) EtOH.method = CLAPEYRON @@ -79,10 +77,10 @@ def test_EnthalpyVaporization(): EtOH = EnthalpyVaporization(CASRN='64-17-5', Tc=514.0) Hvap_calc = [] - for i in ['GHARAGHEIZI_HVAP_298', 'CRC_HVAP_298', 'VDI_TABULAR', 'COOLPROP']: + for i in ['GHARAGHEIZI_HVAP_298', 'CRC_HVAP_298', 'VDI_TABULAR']: EtOH.method = i Hvap_calc.append(EtOH.T_dependent_property(310.0)) - Hvap_exp = [41304.19234346344, 41421.6450231131, 41857.962450207546, 41796.56243049473] + Hvap_exp = [41304.19234346344, 41421.6450231131, 41857.962450207546] assert_close1d(Hvap_calc, Hvap_exp) # Test Clapeyron, without Zl @@ -109,10 +107,20 @@ def test_EnthalpyVaporization(): assert EnthalpyVaporization.from_json(EtOH.as_json()) == EtOH - +@pytest.mark.CoolProp @pytest.mark.meta_T_dept -def test_EnthalpyVaporization_Watson_extrapolation(): - from thermo.phase_change import COOLPROP +def test_EnthalpyVaporization_CoolProp(): + EtOH = EnthalpyVaporization(Tb=351.39, Tc=514.0, Pc=6137000.0, omega=0.635, similarity_variable=0.1954, Psat=7872.2, Zg=0.9633, Zl=0.0024, CASRN='64-17-5') + + EtOH.method = COOLPROP + assert_close(EtOH.T_dependent_property(305), 42062.9371631488) + + # Reduced property inputs + EtOH = EnthalpyVaporization(CASRN='64-17-5', Tc=514.0) + EtOH.method = COOLPROP + assert_close(EtOH.T_dependent_property(305), 41796.56243049473) + + # Watson extrapolation obj = 
EnthalpyVaporization(CASRN='7732-18-5', Tb=373.124, Tc=647.14, Pc=22048320.0, omega=0.344, similarity_variable=0.16652530518537598, Psat=3167, Zl=1.0, Zg=0.96, extrapolation='Watson') @@ -125,6 +133,23 @@ def test_EnthalpyVaporization_Watson_extrapolation(): assert_close(obj.solve_property(1e-20), 647.13999999983) assert EnthalpyVaporization.from_json(obj.as_json()) == obj +@pytest.mark.meta_T_dept +def test_EnthalpyVaporization_Watson_extrapolation(): + obj = EnthalpyVaporization(CASRN='7732-18-5', Tb=373.124, Tc=647.14, Pc=22048320.0, omega=0.344, + similarity_variable=0.16652530518537598, Psat=3167, Zl=1.0, Zg=0.96, + extrapolation='Watson') + + # Data from CoolProp + Ts = [300, 400, 500, 600] + Hvaps = [43908.418874478055, 39322.84456586401, 32914.75657594899, 21122.090961998296] + obj.add_tabular_data(Ts=Ts, properties=Hvaps, name='test0') + obj.method = 'test0' + assert 0 == obj(obj.Tc) + assert_close(obj.solve_property(1), 647.1399999998296) + + obj.solve_property(5e4) + obj.solve_property(1e-20) + assert EnthalpyVaporization.from_json(obj.as_json()) == obj @pytest.mark.meta_T_dept def test_EnthalpySublimation_no_numpy(): diff --git a/thermo/phase_change.py b/thermo/phase_change.py index <HASH>..<HASH> 100644 --- a/thermo/phase_change.py +++ b/thermo/phase_change.py @@ -467,7 +467,7 @@ class EnthalpyVaporization(TDependentProperty): ''' if method == POLY_FIT: if T > self.poly_fit_Tc: - Hvap = 0 + Hvap = 0.0 else: Hvap = horner(self.poly_fit_coeffs, log(1.0 - T/self.poly_fit_Tc)) diff --git a/thermo/utils.py b/thermo/utils.py index <HASH>..<HASH> 100644 --- a/thermo/utils.py +++ b/thermo/utils.py @@ -1677,7 +1677,6 @@ class TDependentProperty(object): self.all_methods.add(name) self.method = name - self.extrapolation = self.extrapolation def solve_property(self, goal): r'''Method to solve for the temperature at which a property is at a
Remove unnecessary call to set extrapolation method and update heat of vaporization for pypy compatibility
CalebBell_thermo
train
5043af4d0250b32b9312e203faa80a2ca412443e
diff --git a/spec/cb/cb_application_external_api_spec.rb b/spec/cb/cb_application_external_api_spec.rb index <HASH>..<HASH> 100755 --- a/spec/cb/cb_application_external_api_spec.rb +++ b/spec/cb/cb_application_external_api_spec.rb @@ -3,27 +3,52 @@ require 'spec_helper' module Cb describe Cb::ApplicationExternalApi do - context '.submit_app' do - it 'should send external info to api', :vcr => {:cassette_name => 'job/application_external/submit_app'} do - search = Cb.job_search_criteria.location('Atlanta, GA').radius(10).keywords('customcodes:CBDEV_applyurlyes').search() - job = search[Random.new.rand(0..24)] - app = Cb::CbApplicationExternal.new({:job_did => job.did, :email => 'bogus@bogus.org', :ipath => 'bogus', :site_id => 'bogus'}) + context '#submit_app' do + before :each do + @app = Cb::CbApplicationExternal.new({:job_did => 'bogus-did', :site_id => 'bogus', :email => 'bogus', + :ipath => 'bogus-ipath', :apply_url => 'jobs.baconparadise.com'}) + end + + def stub_app_submission_to_return(response_content) + stub_request(:post, uri_stem(Cb.configuration.uri_application_external)).with(:body => anything). 
+ to_return(:body => response_content.to_json) + end + + def assert_blank_apply_url(app) + app.apply_url.blank?.should eq true + end - app = Cb.application_external.submit_app(app) - expect(app.apply_url.length).to be >= 1 - expect(app.api_error).to be == false + it 'raises exception for incorrent input type (anything other than Cb::CbApplication)' do + expect { Cb::ApplicationExternalApi.submit_app(Object.new) }.to raise_error Cb::IncomingParamIsWrongTypeException end - it 'should get invalid did error from api', :vcr => {:cassette_name => 'job/application_external/submit_app_error'} do - app = Cb::CbApplicationExternal.new({:job_did => 'bogus-did', :email => 'bogus@bogus.org', :ipath => 'bogus', :site_id => 'bogus'}) + context 'when response hash contains enough data' do + before :each do + stub_app_submission_to_return({ 'ApplyUrl' => 'http://delicious.baconmobile.com' }) + end - app = Cb.application_external.submit_app(app) - expect(app.apply_url.blank?).to be == true - expect(app.cb_response.errors.length).to be >= 1 - app.api_error.should == false - expect(app.api_error).to be == false + it 'the same application object that it took as input is returned' do + app = Cb::ApplicationExternalApi.submit_app(@app) + app.should eq @app + app.object_id.should eq @app.object_id + end + + it 'sets apply_url on the application' do + app = Cb::ApplicationExternalApi.submit_app(@app) + app.apply_url.should eq 'http://delicious.baconmobile.com' + end + end + + context 'when missing ApplyUrl field' do + it 'app redirect url is set to a blank string' do + stub_app_submission_to_return(Hash.new) + app = Cb::ApplicationExternalApi.submit_app(@app) + app.apply_url.blank?.should eq true + end end - end + + end # #submit_app + end end \ No newline at end of file diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index <HASH>..<HASH> 100644 --- a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -1,12 +1,15 @@ -require 'rubygems' require 'simplecov' -require 'cb' -require 
'webmock/rspec' - SimpleCov.start do add_filter '/spec/' + add_group 'models', 'lib/cb/models' + add_group 'clients', 'lib/cb/clients' + add_group 'criteria', 'lib/cb/criteria' + add_group 'responses', 'lib/cb/responses' + add_group 'utils', 'lib/cb/utils' end +require 'rubygems' +require 'cb' require 'vcr' require 'webmock/rspec'
redoing simplecov config with some grouping, bringing external application API client tests off of VCR
careerbuilder_ruby-cb-api
train
855346e5b1ed219f7d897feaaac1767ed4790b4f
diff --git a/metrique/client/result.py b/metrique/client/result.py index <HASH>..<HASH> 100644 --- a/metrique/client/result.py +++ b/metrique/client/result.py @@ -325,9 +325,8 @@ class Result(DataFrame): specified date. ''' starts = self._start.groupby(self._oid).min() - ids = starts[starts > dt].index.tolist() - res = self[self._oid.apply(lambda v: v in ids)] - return res + oids = starts[starts > dt].index.tolist() + return self[self._oid.apply(lambda v: v in oids)] @filtered def filter(self, mask):
result - started_after refactor
kejbaly2_metrique
train
75f8ce3a05a39d251cf627951917439f102a4199
diff --git a/tasks/init.js b/tasks/init.js index <HASH>..<HASH> 100644 --- a/tasks/init.js +++ b/tasks/init.js @@ -458,7 +458,11 @@ module.exports = function(grunt) { // Fail task if errors were logged. if (grunt.task.current.errorCount) { taskDone(false); } // Otherwise, print a success message. - grunt.log.writeln().writeln('Initialized from template "' + name + '".'); + grunt.log.subhead('Initialized from template "' + name + '".'); + // Show any template-specific notes. + if (initTemplate.after) { + grunt.log.writelns(initTemplate.after); + } // All done! taskDone(); }].concat(args));
Templates may now export an "after" property to display a message… after.
gruntjs_grunt-init
train
030023f5aab128ce0cbe7aa24b83de5ec248cc04
diff --git a/src/core/a-entity.js b/src/core/a-entity.js index <HASH>..<HASH> 100644 --- a/src/core/a-entity.js +++ b/src/core/a-entity.js @@ -309,8 +309,10 @@ var proto = Object.create(ANode.prototype, { var isComponentDefined; componentInfo = utils.split(attrName, MULTIPLE_COMPONENT_DELIMITER); - componentId = componentInfo[1]; componentName = componentInfo[0]; + componentId = componentInfo.length > 2 + ? componentInfo.slice(1).join('__') + : componentInfo[1]; // Not a registered component. if (!COMPONENTS[componentName]) { return; } diff --git a/tests/core/component.test.js b/tests/core/component.test.js index <HASH>..<HASH> 100644 --- a/tests/core/component.test.js +++ b/tests/core/component.test.js @@ -547,6 +547,12 @@ suite('Component', function () { }, Error); assert.notOk('my__component' in components); }); + + test('can have underscore in component id', function () { + AFRAME.registerComponent('test', {multiple: true}); + el.setAttribute('test__foo__bar', ''); + assert.equal(el.components['test__foo__bar'].id, 'foo__bar'); + }); }); suite('schema', function () {
allow double underscores in component id (#<I>)
aframevr_aframe
train
eeaf2e52e7519fb7d3c2fe6f6e52a08f0501ca7b
diff --git a/presto-main/src/main/java/com/facebook/presto/sql/planner/EqualityInference.java b/presto-main/src/main/java/com/facebook/presto/sql/planner/EqualityInference.java index <HASH>..<HASH> 100644 --- a/presto-main/src/main/java/com/facebook/presto/sql/planner/EqualityInference.java +++ b/presto-main/src/main/java/com/facebook/presto/sql/planner/EqualityInference.java @@ -67,7 +67,7 @@ public class EqualityInference return ComparisonChain.start() .compare(DependencyExtractor.extractAll(expression1).size(), DependencyExtractor.extractAll(expression2).size()) .compare(SubExpressionExtractor.extract(expression1).size(), SubExpressionExtractor.extract(expression2).size()) - .compare(expression1, expression2, Ordering.arbitrary()) + .compare(expression1.toString(), expression2.toString()) .result(); } });
Make CANONICAL_ORDERING use string comparison as a last resort This makes plans more deterministic since they don't depend on JVM context.
prestodb_presto
train
35cbcb860a05625c35153f08d60a259be6dff5bb
diff --git a/model/annotationHelpers.js b/model/annotationHelpers.js index <HASH>..<HASH> 100644 --- a/model/annotationHelpers.js +++ b/model/annotationHelpers.js @@ -1,4 +1,3 @@ -import forEach from '../util/forEach' import isArray from '../util/isArray' import uuid from '../util/uuid' @@ -20,8 +19,9 @@ export default { function insertedText(doc, coordinate, length) { if (!length) return; var index = doc.getIndex('annotations'); - var annotations = index.get(coordinate.path); - forEach(annotations, function(anno) { + var annotations = index.get(coordinate.path) + for (let i = 0; i < annotations.length; i++) { + let anno = annotations[i] var pos = coordinate.offset; var start = anno.start.offset; var end = anno.end.offset; @@ -43,7 +43,7 @@ function insertedText(doc, coordinate, length) { if (newEnd !== end) { doc.set([anno.id, 'end', 'offset'], newEnd); } - }); + } // TODO: fix support for container annotations // // same for container annotation anchors @@ -71,7 +71,8 @@ function deletedText(doc, path, startOffset, endOffset) { var index = doc.getIndex('annotations'); var annotations = index.get(path); var length = endOffset - startOffset; - forEach(annotations, function(anno) { + for (let i = 0; i < annotations.length; i++) { + let anno = annotations[i] var pos1 = startOffset; var pos2 = endOffset; var start = anno.start.offset; @@ -103,7 +104,7 @@ function deletedText(doc, path, startOffset, endOffset) { } } } - }); + } // TODO: fix support for container annotations // // same for container annotation anchors // index = doc.getIndex('container-annotation-anchors'); @@ -148,37 +149,38 @@ function deletedText(doc, path, startOffset, endOffset) { function transferAnnotations(doc, path, offset, newPath, newOffset) { var index = doc.getIndex('annotations'); var annotations = index.get(path, offset); - forEach(annotations, function(a) { + for (let i = 0; i < annotations.length; i++) { + let a = annotations[i] var isInside = (offset > a.start.offset && offset < 
a.end.offset); var start = a.start.offset; var end = a.end.offset; - var newStart, newEnd; // 1. if the cursor is inside an annotation it gets either split or truncated if (isInside) { // create a new annotation if the annotation is splittable if (a.canSplit()) { - var newAnno = a.toJSON(); + let newAnno = a.toJSON(); newAnno.id = uuid(a.type + "_"); - newAnno.start.offset = newOffset; - newAnno.end.offset = newOffset + a.end.offset - offset; - newAnno.path = newPath; + newAnno.start.path = newPath + newAnno.start.offset = newOffset + newAnno.end.path = newPath + newAnno.end.offset = newOffset + a.end.offset - offset doc.create(newAnno); } // in either cases truncate the first part - newStart = a.start.offset; - newEnd = offset; + let newStartOffset = a.start.offset; + let newEndOffset = offset; // if after truncate the anno is empty, delete it - if (newEnd === newStart) { + if (newEndOffset === newStartOffset) { doc.delete(a.id); } // ... otherwise update the range else { // TODO: Use coordintate ops! - if (newStart !== start) { - doc.set([a.id, 'start', 'offset'], newStart); + if (newStartOffset !== start) { + doc.set([a.id, 'start', 'offset'], newStartOffset); } - if (newEnd !== end) { - doc.set([a.id, 'end', 'offset'], newEnd); + if (newEndOffset !== end) { + doc.set([a.id, 'end', 'offset'], newEndOffset); } } } @@ -187,14 +189,12 @@ function transferAnnotations(doc, path, offset, newPath, newOffset) { // TODO: Use coordintate ops! // Note: we are preserving the annotation so that anything which is connected to the annotation // remains valid. 
- newStart = newOffset + a.start.offset - offset; - newEnd = newOffset + a.end.offset - offset; doc.set([a.id, 'start', 'path'], newPath); - doc.set([a.id, 'start', 'offset'], newStart); + doc.set([a.id, 'start', 'offset'], newOffset + a.start.offset - offset); doc.set([a.id, 'end', 'path'], newPath); - doc.set([a.id, 'end', 'offset'], newEnd); + doc.set([a.id, 'end', 'offset'], newOffset + a.end.offset - offset); } - }); + } // TODO: fix support for container annotations // // same for container annotation anchors
Fix regressions in annotationHelpers.
substance_substance
train
9809bea258d4523cfcb9e9a14ed38dc093982412
diff --git a/astrobase/lcproc.py b/astrobase/lcproc.py index <HASH>..<HASH> 100644 --- a/astrobase/lcproc.py +++ b/astrobase/lcproc.py @@ -391,7 +391,12 @@ def lclist_parallel_worker(task): # read the light curve in lcdict = readerfunc(lcf) - if len(lcdict) == 2: + + # this should handle lists/tuples being returned by readerfunc + # we assume that the first element is the actual lcdict + # FIXME: figure out how to not need this assumption + if ( (isinstance(lcdict, list) or isinstance(lcdict, tuple)) and + (isinstance(lcdict[0], dict)) ): lcdict = lcdict[0] # insert all of the columns @@ -1218,7 +1223,12 @@ def timebinlc(lcfile, # get the LC into a dict lcdict = readerfunc(lcfile) - if isinstance(lcdict, tuple) and isinstance(lcdict[0],dict): + + # this should handle lists/tuples being returned by readerfunc + # we assume that the first element is the actual lcdict + # FIXME: figure out how to not need this assumption + if ( (isinstance(lcdict, list) or isinstance(lcdict, tuple)) and + (isinstance(lcdict[0], dict)) ): lcdict = lcdict[0] # skip already binned light curves @@ -1439,7 +1449,12 @@ def get_varfeatures(lcfile, # get the LC into a dict lcdict = readerfunc(lcfile) - if isinstance(lcdict, tuple) and isinstance(lcdict[0],dict): + + # this should handle lists/tuples being returned by readerfunc + # we assume that the first element is the actual lcdict + # FIXME: figure out how to not need this assumption + if ( (isinstance(lcdict, list) or isinstance(lcdict, tuple)) and + (isinstance(lcdict[0], dict)) ): lcdict = lcdict[0] resultdict = {'objectid':lcdict['objectid'], @@ -1809,14 +1824,25 @@ def get_periodicfeatures(pfpickle, # get the object LC into a dict lcdict = readerfunc(lcfile) - if isinstance(lcdict, tuple) and isinstance(lcdict[0],dict): + + # this should handle lists/tuples being returned by readerfunc + # we assume that the first element is the actual lcdict + # FIXME: figure out how to not need this assumption + if ( (isinstance(lcdict, list) or 
isinstance(lcdict, tuple)) and + (isinstance(lcdict[0], dict)) ): lcdict = lcdict[0] # get the nbr object LC into a dict if there is one if nbrlcf is not None: nbrlcdict = readerfunc(nbrlcf) - if isinstance(nbrlcdict, tuple) and isinstance(nbrlcdict[0],dict): + + # this should handle lists/tuples being returned by readerfunc + # we assume that the first element is the actual lcdict + # FIXME: figure out how to not need this assumption + if ( (isinstance(nbrlcdict, list) or + isinstance(nbrlcdict, tuple)) and + (isinstance(nbrlcdict[0], dict)) ): nbrlcdict = nbrlcdict[0] # this will be the output file @@ -2391,7 +2417,12 @@ def get_starfeatures(lcfile, # get the LC into a dict lcdict = readerfunc(lcfile) - if isinstance(lcdict, tuple) and isinstance(lcdict[0],dict): + + # this should handle lists/tuples being returned by readerfunc + # we assume that the first element is the actual lcdict + # FIXME: figure out how to not need this assumption + if ( (isinstance(lcdict, list) or isinstance(lcdict, tuple)) and + (isinstance(lcdict[0], dict)) ): lcdict = lcdict[0] resultdict = {'objectid':lcdict['objectid'], @@ -3392,7 +3423,12 @@ def runpf(lcfile, # get the LC into a dict lcdict = readerfunc(lcfile) - if isinstance(lcdict, tuple) and isinstance(lcdict[0],dict): + + # this should handle lists/tuples being returned by readerfunc + # we assume that the first element is the actual lcdict + # FIXME: figure out how to not need this assumption + if ( (isinstance(lcdict, list) or isinstance(lcdict, tuple)) and + (isinstance(lcdict[0], dict)) ): lcdict = lcdict[0] outfile = os.path.join(outdir, 'periodfinding-%s.pkl' % @@ -3847,7 +3883,12 @@ def update_checkplotdict_nbrlcs( continue lcdict = readerfunc(lcfpath) - if isinstance(lcdict, tuple) and isinstance(lcdict[0],dict): + + # this should handle lists/tuples being returned by readerfunc + # we assume that the first element is the actual lcdict + # FIXME: figure out how to not need this assumption + if ( (isinstance(lcdict, 
list) or isinstance(lcdict, tuple)) and + (isinstance(lcdict[0], dict)) ): lcdict = lcdict[0] @@ -4079,7 +4120,12 @@ def runcp(pfpickle, lcdict = readerfunc(lcfpath) - if isinstance(lcdict, tuple) and isinstance(lcdict[0], dict): + + # this should handle lists/tuples being returned by readerfunc + # we assume that the first element is the actual lcdict + # FIXME: figure out how to not need this assumption + if ( (isinstance(lcdict, list) or isinstance(lcdict, tuple)) and + (isinstance(lcdict[0], dict)) ): lcdict = lcdict[0] # normalize using the special function if specified
lcproc: fix readerfunc apply if sequences are returned
waqasbhatti_astrobase
train
e939107ce46d6852dea73193626fdf70d316ac18
diff --git a/lenstronomy/LensModel/Profiles/cnfw.py b/lenstronomy/LensModel/Profiles/cnfw.py index <HASH>..<HASH> 100644 --- a/lenstronomy/LensModel/Profiles/cnfw.py +++ b/lenstronomy/LensModel/Profiles/cnfw.py @@ -4,6 +4,7 @@ import numpy as np from lenstronomy.LensModel.Profiles.nfw import NFW import warnings + class CNFW(object): """ this class computes the lensing quantities of a cored NFW profile: @@ -267,6 +268,7 @@ class CNFW(object): x = R / Rs b = r_core * Rs ** -1 + b = max(b, 0.000001) gx = self._G(x, b) a = 4*rho0*Rs*gx/x**2 @@ -296,6 +298,7 @@ class CNFW(object): R[R <= c] = c x = R * Rs ** -1 b = r_core * Rs ** -1 + b = max(b, c) gx = self._G(x, b) Fx = self._F(x, b) a = 2 * rho0 * Rs * (2 * gx / x ** 2 - Fx) # /x #2*rho0*Rs*(2*gx/x**2 - Fx)*axis/x @@ -313,7 +316,8 @@ class CNFW(object): x = R / Rs b = r_core / Rs - gx = self._G(x,b) + b = max(b, 0.000001) + gx = self._G(x, b) #m_2d = 4 * np.pi* rho0 * Rs**3 * gx diff --git a/lenstronomy/LensModel/single_plane.py b/lenstronomy/LensModel/single_plane.py index <HASH>..<HASH> 100644 --- a/lenstronomy/LensModel/single_plane.py +++ b/lenstronomy/LensModel/single_plane.py @@ -79,6 +79,7 @@ class SinglePlane(object): self.func_list.append(TNFW()) elif lens_type == 'CNFW': from lenstronomy.LensModel.Profiles.cnfw import CNFW + self.func_list.append(CNFW()) elif lens_type == 'SERSIC': from lenstronomy.LensModel.Profiles.sersic import Sersic self.func_list.append(Sersic()) @@ -135,9 +136,6 @@ class SinglePlane(object): elif lens_type == 'coreBURKERT': from lenstronomy.LensModel.Profiles.coreBurkert import coreBurkert self.func_list.append(coreBurkert()) - elif lens_type == 'CNFW': - from lenstronomy.LensModel.Profiles.cnfw import CNFW - self.func_list.append(CNFW()) else: raise ValueError('%s is not a valid lens model' % lens_type)
improve stability in cnfw
sibirrer_lenstronomy
train
c64e39ea6141fbe95f957d1d3eaf1e20e3f122fd
diff --git a/server/jetstream_api.go b/server/jetstream_api.go index <HASH>..<HASH> 100644 --- a/server/jetstream_api.go +++ b/server/jetstream_api.go @@ -258,7 +258,7 @@ func (s *Server) jsCreateTemplateRequest(sub *subscription, c *client, subject, } templateName := subjectToken(subject, 2) if templateName != cfg.Name { - s.sendInternalAccountMsg(c.acc, reply, protoErr("template name in subject does not match request")) + s.sendAPIResponse(c, subject, reply, string(msg), protoErr("template name in subject does not match request")) return } @@ -361,12 +361,12 @@ func (s *Server) jsCreateStreamRequest(sub *subscription, c *client, subject, re } var cfg StreamConfig if err := json.Unmarshal(msg, &cfg); err != nil { - s.sendInternalAccountMsg(c.acc, reply, JetStreamBadRequest) + s.sendAPIResponse(c, subject, reply, string(msg), JetStreamBadRequest) return } streamName := subjectToken(subject, 2) if streamName != cfg.Name { - s.sendInternalAccountMsg(c.acc, reply, protoErr("stream name in subject does not match request")) + s.sendAPIResponse(c, subject, reply, string(msg), protoErr("stream name in subject does not match request")) return } @@ -388,12 +388,12 @@ func (s *Server) jsStreamUpdateRequest(sub *subscription, c *client, subject, re } var cfg StreamConfig if err := json.Unmarshal(msg, &cfg); err != nil { - s.sendInternalAccountMsg(c.acc, reply, JetStreamBadRequest) + s.sendAPIResponse(c, subject, reply, string(msg), JetStreamBadRequest) return } streamName := subjectToken(subject, 2) if streamName != cfg.Name { - s.sendInternalAccountMsg(c.acc, reply, protoErr("stream name in subject does not match request")) + s.sendAPIResponse(c, subject, reply, string(msg), protoErr("stream name in subject does not match request")) return } mset, err := c.acc.LookupStream(streamName)
send audit advisories for all api errors
nats-io_gnatsd
train
8e831e8b53d270e84d28773809a2417ffe45ee54
diff --git a/app/controllers/api/v1/ping_controller.rb b/app/controllers/api/v1/ping_controller.rb index <HASH>..<HASH> 100644 --- a/app/controllers/api/v1/ping_controller.rb +++ b/app/controllers/api/v1/ping_controller.rb @@ -36,6 +36,6 @@ class Api::V1::PingController < Api::V1::ApiController api :GET, "/version", "Shows name and version information" description "This service is only available for authenticated users" def version - respond_for_show :resource => { :name => Katello.config.app_name, :version => Katello.config.katello_version } + respond_for_show :resource => { :name => Katello.config.app_mode, :version => Katello.config.katello_version } end end diff --git a/spec/controllers/api/v1/ping_controller_spec.rb b/spec/controllers/api/v1/ping_controller_spec.rb index <HASH>..<HASH> 100644 --- a/spec/controllers/api/v1/ping_controller_spec.rb +++ b/spec/controllers/api/v1/ping_controller_spec.rb @@ -92,5 +92,19 @@ describe Api::V1::PingController do end + context "version" do + + it "should get back the correct app name for katello", :katello => true do + get :version + response.body.should == { :name => "katello", :version => Katello.config.katello_version }.to_json + end + + it "should get back the correct app name for headpin", :headpin => true do + get :version + response.body.should == { :name => "headpin", :version => Katello.config.katello_version }.to_json + end + + end + end
<I> - /api/version says "Headpin"
Katello_katello
train
7c803fd61711c1ebdf1f034548f40607f2a2c298
diff --git a/lib/adhearsion/initializer.rb b/lib/adhearsion/initializer.rb index <HASH>..<HASH> 100644 --- a/lib/adhearsion/initializer.rb +++ b/lib/adhearsion/initializer.rb @@ -209,7 +209,6 @@ module Adhearsion Adhearsion::Process.important_threads << Thread.new do catching_standard_errors do Adhearsion::Console.run - Adhearsion::Process.shutdown end end end
[BUGFIX] Don't try to shutdown the process when the console exits
adhearsion_adhearsion
train