file_name large_stringlengths 4 140 | prefix large_stringlengths 0 39k | suffix large_stringlengths 0 36.1k | middle large_stringlengths 0 29.4k | fim_type large_stringclasses 4
values |
|---|---|---|---|---|
OnLoadDirective.ts | namespace JustinCredible.TheWeek.Directives {
/**
* A directive for handling an element's onload event (eg an image tag).
*
* http://stackoverflow.com/questions/11868393/angularjs-inputtext-ngchange-fires-while-the-value-is-changing
*/
export class OnLoadDirective implements ng.IDirective {
//#region Injection
public static ID = "onLoad";
public static get $inject(): string[] |
constructor(
private $parse: ng.IParseService) {
// Ensure that the link function is bound to this instance so we can
// access instance variables like $parse. AngularJs normally executes
// the link function in the context of the global scope.
this.link = _.bind(this.link, this);
}
//#endregion
public restrict = "A";
public link(scope: ng.IScope, element: ng.IAugmentedJQuery, attributes: ng.IAttributes, controller: any, transclude: ng.ITranscludeFunction): void {
// Parse the value of the on-load property; this will be a function
// that the user has set on the element for example: <img on-load="load()"/>
/* tslint:disable:no-string-literal */
var fn = this.$parse(attributes["onLoad"]);
/* tslint:enable:no-string-literal */
// Subscribe to the load event of the image element.
element.on("load", (event) => {
// When the load event occurs, execute the user defined load function.
scope.$apply(() => {
fn(scope, { $event: event });
});
});
}
}
}
| {
return ["$parse"];
} | identifier_body |
OnLoadDirective.ts | namespace JustinCredible.TheWeek.Directives {
/**
* A directive for handling an element's onload event (eg an image tag).
*
* http://stackoverflow.com/questions/11868393/angularjs-inputtext-ngchange-fires-while-the-value-is-changing
*/
export class OnLoadDirective implements ng.IDirective { | public static ID = "onLoad";
public static get $inject(): string[] {
return ["$parse"];
}
constructor(
private $parse: ng.IParseService) {
// Ensure that the link function is bound to this instance so we can
// access instance variables like $parse. AngularJs normally executes
// the link function in the context of the global scope.
this.link = _.bind(this.link, this);
}
//#endregion
public restrict = "A";
public link(scope: ng.IScope, element: ng.IAugmentedJQuery, attributes: ng.IAttributes, controller: any, transclude: ng.ITranscludeFunction): void {
// Parse the value of the on-load property; this will be a function
// that the user has set on the element for example: <img on-load="load()"/>
/* tslint:disable:no-string-literal */
var fn = this.$parse(attributes["onLoad"]);
/* tslint:enable:no-string-literal */
// Subscribe to the load event of the image element.
element.on("load", (event) => {
// When the load event occurs, execute the user defined load function.
scope.$apply(() => {
fn(scope, { $event: event });
});
});
}
}
} |
//#region Injection
| random_line_split |
OnLoadDirective.ts | namespace JustinCredible.TheWeek.Directives {
/**
* A directive for handling an element's onload event (eg an image tag).
*
* http://stackoverflow.com/questions/11868393/angularjs-inputtext-ngchange-fires-while-the-value-is-changing
*/
export class OnLoadDirective implements ng.IDirective {
//#region Injection
public static ID = "onLoad";
public static get $inject(): string[] {
return ["$parse"];
}
| (
private $parse: ng.IParseService) {
// Ensure that the link function is bound to this instance so we can
// access instance variables like $parse. AngularJs normally executes
// the link function in the context of the global scope.
this.link = _.bind(this.link, this);
}
//#endregion
public restrict = "A";
public link(scope: ng.IScope, element: ng.IAugmentedJQuery, attributes: ng.IAttributes, controller: any, transclude: ng.ITranscludeFunction): void {
// Parse the value of the on-load property; this will be a function
// that the user has set on the element for example: <img on-load="load()"/>
/* tslint:disable:no-string-literal */
var fn = this.$parse(attributes["onLoad"]);
/* tslint:enable:no-string-literal */
// Subscribe to the load event of the image element.
element.on("load", (event) => {
// When the load event occurs, execute the user defined load function.
scope.$apply(() => {
fn(scope, { $event: event });
});
});
}
}
}
| constructor | identifier_name |
view.js | function BxTimelineView(oOptions) {
this._sActionsUri = oOptions.sActionUri;
this._sActionsUrl = oOptions.sActionUrl;
this._sObjName = oOptions.sObjName == undefined ? 'oTimelineView' : oOptions.sObjName;
this._iOwnerId = oOptions.iOwnerId == undefined ? 0 : oOptions.iOwnerId;
this._sAnimationEffect = oOptions.sAnimationEffect == undefined ? 'slide' : oOptions.sAnimationEffect;
this._iAnimationSpeed = oOptions.iAnimationSpeed == undefined ? 'slow' : oOptions.iAnimationSpeed;
this._aHtmlIds = oOptions.aHtmlIds == undefined ? {} : oOptions.aHtmlIds;
this._oRequestParams = oOptions.oRequestParams == undefined ? {} : oOptions.oRequestParams;
var $this = this;
$(document).ready(function() {
$this.initMasonry();
$('.bx-tl-item').resize(function() {
$this.reloadMasonry();
});
$('img.bx-tl-item-image').load(function() {
$this.reloadMasonry();
});
});
}
BxTimelineView.prototype = new BxTimelineMain();
BxTimelineView.prototype.changePage = function(oElement, iStart, iPerPage) {
this._oRequestParams.start = iStart;
this._oRequestParams.per_page = iPerPage;
this._getPosts(oElement, 'page');
};
BxTimelineView.prototype.changeFilter = function(oLink) {
var sId = $(oLink).attr('id');
this._oRequestParams.start = 0;
this._oRequestParams.filter = sId.substr(sId.lastIndexOf('-') + 1, sId.length);
this._getPosts(oLink, 'filter');
};
BxTimelineView.prototype.changeTimeline = function(oLink, iYear) {
this._oRequestParams.start = 0;
this._oRequestParams.timeline = iYear;
this._getPosts(oLink, 'timeline');
};
BxTimelineView.prototype.deletePost = function(oLink, iId) {
var $this = this;
var oView = $(this.sIdView);
var oData = this._getDefaultData();
oData['id'] = iId;
this.loadingInBlock(oLink, true);
$.post(
this._sActionsUrl + 'delete/',
oData,
function(oData) {
$this.loadingInBlock(oLink, false);
if(oData && oData.msg != undefined)
alert(oData.msg);
if(oData && oData.code == 0)
$(oLink).parents('.bx-popup-applied:first:visible').dolPopupHide();
$($this.sIdItem + oData.id).bx_anim('hide', $this._sAnimationEffect, $this._iAnimationSpeed, function() {
$(this).remove();
if(oView.find('.bx-tl-item').length != 0) {
$this.reloadMasonry();
return;
}
$this.destroyMasonry();
oView.find('.bx-tl-load-more').hide();
oView.find('.bx-tl-empty').show();
});
},
'json'
);
};
BxTimelineView.prototype.showMoreContent = function(oLink) {
$(oLink).parent('span').next('span').show().prev('span').remove();
this.reloadMasonry();
};
BxTimelineView.prototype.showPhoto = function(oLink, sUrl) {
$('#' + this._aHtmlIds['photo_popup']).dolPopupImage(sUrl, $(oLink).parent());
};
BxTimelineView.prototype.commentItem = function(oLink, sSystem, iId) {
var $this = this;
var oData = this._getDefaultData();
oData['system'] = sSystem;
oData['id'] = iId;
var oComments = $(oLink).parents('.' + this.sClassItem + ':first').find('.' + this.sClassItemComments);
if(oComments.children().length > 0) |
if(oLink)
this.loadingInItem(oLink, true);
jQuery.get (
this._sActionsUrl + 'get_comments',
oData,
function(oData) {
if(oLink)
$this.loadingInItem(oLink, false);
if(!oData.content)
return;
oComments.html($(oData.content).hide()).children(':hidden').bxTime().bx_anim('show', $this._sAnimationEffect, $this._iAnimationSpeed);
},
'json'
);
};
BxTimelineView.prototype._getPosts = function(oElement, sAction) {
var $this = this;
var oView = $(this.sIdView);
switch(sAction) {
case 'page':
this.loadingInButton(oElement, true);
break;
default:
this.loadingInBlock(oElement, true);
break;
}
jQuery.get(
this._sActionsUrl + 'get_posts/',
this._getDefaultData(),
function(oData) {
if(oData && oData.items != undefined) {
var sItems = $.trim(oData.items);
switch(sAction) {
case 'page':
$this.loadingInButton(oElement, false);
$this.appendMasonry($(sItems).bxTime());
break;
default:
$this.loadingInBlock(oElement, false);
oView.find('.' + $this.sClassItems).bx_anim('hide', $this._sAnimationEffect, $this._iAnimationSpeed, function() {
$(this).html(sItems).show().bxTime();
if($this.isMasonryEmpty()) {
$this.destroyMasonry();
return;
}
if($this.isMasonry())
$this.reloadMasonry();
else
$this.initMasonry();
});
break;
}
}
if(oData && oData.load_more != undefined)
oView.find('.' + $this.sSP + '-load-more-holder').html($.trim(oData.load_more));
if(oData && oData.back != undefined)
oView.find('.' + $this.sSP + '-back-holder').html($.trim(oData.back));
},
'json'
);
};
| {
oComments.bx_anim('toggle', this._sAnimationEffect, this._iAnimationSpeed);
return;
} | conditional_block |
view.js | function BxTimelineView(oOptions) |
BxTimelineView.prototype = new BxTimelineMain();
BxTimelineView.prototype.changePage = function(oElement, iStart, iPerPage) {
this._oRequestParams.start = iStart;
this._oRequestParams.per_page = iPerPage;
this._getPosts(oElement, 'page');
};
BxTimelineView.prototype.changeFilter = function(oLink) {
var sId = $(oLink).attr('id');
this._oRequestParams.start = 0;
this._oRequestParams.filter = sId.substr(sId.lastIndexOf('-') + 1, sId.length);
this._getPosts(oLink, 'filter');
};
BxTimelineView.prototype.changeTimeline = function(oLink, iYear) {
this._oRequestParams.start = 0;
this._oRequestParams.timeline = iYear;
this._getPosts(oLink, 'timeline');
};
BxTimelineView.prototype.deletePost = function(oLink, iId) {
var $this = this;
var oView = $(this.sIdView);
var oData = this._getDefaultData();
oData['id'] = iId;
this.loadingInBlock(oLink, true);
$.post(
this._sActionsUrl + 'delete/',
oData,
function(oData) {
$this.loadingInBlock(oLink, false);
if(oData && oData.msg != undefined)
alert(oData.msg);
if(oData && oData.code == 0)
$(oLink).parents('.bx-popup-applied:first:visible').dolPopupHide();
$($this.sIdItem + oData.id).bx_anim('hide', $this._sAnimationEffect, $this._iAnimationSpeed, function() {
$(this).remove();
if(oView.find('.bx-tl-item').length != 0) {
$this.reloadMasonry();
return;
}
$this.destroyMasonry();
oView.find('.bx-tl-load-more').hide();
oView.find('.bx-tl-empty').show();
});
},
'json'
);
};
BxTimelineView.prototype.showMoreContent = function(oLink) {
$(oLink).parent('span').next('span').show().prev('span').remove();
this.reloadMasonry();
};
BxTimelineView.prototype.showPhoto = function(oLink, sUrl) {
$('#' + this._aHtmlIds['photo_popup']).dolPopupImage(sUrl, $(oLink).parent());
};
BxTimelineView.prototype.commentItem = function(oLink, sSystem, iId) {
var $this = this;
var oData = this._getDefaultData();
oData['system'] = sSystem;
oData['id'] = iId;
var oComments = $(oLink).parents('.' + this.sClassItem + ':first').find('.' + this.sClassItemComments);
if(oComments.children().length > 0) {
oComments.bx_anim('toggle', this._sAnimationEffect, this._iAnimationSpeed);
return;
}
if(oLink)
this.loadingInItem(oLink, true);
jQuery.get (
this._sActionsUrl + 'get_comments',
oData,
function(oData) {
if(oLink)
$this.loadingInItem(oLink, false);
if(!oData.content)
return;
oComments.html($(oData.content).hide()).children(':hidden').bxTime().bx_anim('show', $this._sAnimationEffect, $this._iAnimationSpeed);
},
'json'
);
};
BxTimelineView.prototype._getPosts = function(oElement, sAction) {
var $this = this;
var oView = $(this.sIdView);
switch(sAction) {
case 'page':
this.loadingInButton(oElement, true);
break;
default:
this.loadingInBlock(oElement, true);
break;
}
jQuery.get(
this._sActionsUrl + 'get_posts/',
this._getDefaultData(),
function(oData) {
if(oData && oData.items != undefined) {
var sItems = $.trim(oData.items);
switch(sAction) {
case 'page':
$this.loadingInButton(oElement, false);
$this.appendMasonry($(sItems).bxTime());
break;
default:
$this.loadingInBlock(oElement, false);
oView.find('.' + $this.sClassItems).bx_anim('hide', $this._sAnimationEffect, $this._iAnimationSpeed, function() {
$(this).html(sItems).show().bxTime();
if($this.isMasonryEmpty()) {
$this.destroyMasonry();
return;
}
if($this.isMasonry())
$this.reloadMasonry();
else
$this.initMasonry();
});
break;
}
}
if(oData && oData.load_more != undefined)
oView.find('.' + $this.sSP + '-load-more-holder').html($.trim(oData.load_more));
if(oData && oData.back != undefined)
oView.find('.' + $this.sSP + '-back-holder').html($.trim(oData.back));
},
'json'
);
};
| {
this._sActionsUri = oOptions.sActionUri;
this._sActionsUrl = oOptions.sActionUrl;
this._sObjName = oOptions.sObjName == undefined ? 'oTimelineView' : oOptions.sObjName;
this._iOwnerId = oOptions.iOwnerId == undefined ? 0 : oOptions.iOwnerId;
this._sAnimationEffect = oOptions.sAnimationEffect == undefined ? 'slide' : oOptions.sAnimationEffect;
this._iAnimationSpeed = oOptions.iAnimationSpeed == undefined ? 'slow' : oOptions.iAnimationSpeed;
this._aHtmlIds = oOptions.aHtmlIds == undefined ? {} : oOptions.aHtmlIds;
this._oRequestParams = oOptions.oRequestParams == undefined ? {} : oOptions.oRequestParams;
var $this = this;
$(document).ready(function() {
$this.initMasonry();
$('.bx-tl-item').resize(function() {
$this.reloadMasonry();
});
$('img.bx-tl-item-image').load(function() {
$this.reloadMasonry();
});
});
} | identifier_body |
view.js | function BxTimelineView(oOptions) {
this._sActionsUri = oOptions.sActionUri;
this._sActionsUrl = oOptions.sActionUrl;
this._sObjName = oOptions.sObjName == undefined ? 'oTimelineView' : oOptions.sObjName;
this._iOwnerId = oOptions.iOwnerId == undefined ? 0 : oOptions.iOwnerId;
this._sAnimationEffect = oOptions.sAnimationEffect == undefined ? 'slide' : oOptions.sAnimationEffect;
this._iAnimationSpeed = oOptions.iAnimationSpeed == undefined ? 'slow' : oOptions.iAnimationSpeed;
this._aHtmlIds = oOptions.aHtmlIds == undefined ? {} : oOptions.aHtmlIds;
this._oRequestParams = oOptions.oRequestParams == undefined ? {} : oOptions.oRequestParams;
var $this = this;
$(document).ready(function() {
$this.initMasonry();
$('.bx-tl-item').resize(function() {
$this.reloadMasonry();
});
$('img.bx-tl-item-image').load(function() {
$this.reloadMasonry();
});
});
}
BxTimelineView.prototype = new BxTimelineMain();
BxTimelineView.prototype.changePage = function(oElement, iStart, iPerPage) {
this._oRequestParams.start = iStart;
this._oRequestParams.per_page = iPerPage;
this._getPosts(oElement, 'page');
};
BxTimelineView.prototype.changeFilter = function(oLink) {
var sId = $(oLink).attr('id');
this._oRequestParams.start = 0;
this._oRequestParams.filter = sId.substr(sId.lastIndexOf('-') + 1, sId.length);
this._getPosts(oLink, 'filter');
};
BxTimelineView.prototype.changeTimeline = function(oLink, iYear) {
this._oRequestParams.start = 0;
this._oRequestParams.timeline = iYear;
this._getPosts(oLink, 'timeline');
};
BxTimelineView.prototype.deletePost = function(oLink, iId) {
var $this = this;
var oView = $(this.sIdView);
var oData = this._getDefaultData();
oData['id'] = iId;
this.loadingInBlock(oLink, true);
$.post(
this._sActionsUrl + 'delete/',
oData,
function(oData) {
$this.loadingInBlock(oLink, false);
if(oData && oData.msg != undefined)
alert(oData.msg);
if(oData && oData.code == 0)
$(oLink).parents('.bx-popup-applied:first:visible').dolPopupHide();
$($this.sIdItem + oData.id).bx_anim('hide', $this._sAnimationEffect, $this._iAnimationSpeed, function() {
$(this).remove();
if(oView.find('.bx-tl-item').length != 0) {
$this.reloadMasonry(); | oView.find('.bx-tl-empty').show();
});
},
'json'
);
};
BxTimelineView.prototype.showMoreContent = function(oLink) {
$(oLink).parent('span').next('span').show().prev('span').remove();
this.reloadMasonry();
};
BxTimelineView.prototype.showPhoto = function(oLink, sUrl) {
$('#' + this._aHtmlIds['photo_popup']).dolPopupImage(sUrl, $(oLink).parent());
};
BxTimelineView.prototype.commentItem = function(oLink, sSystem, iId) {
var $this = this;
var oData = this._getDefaultData();
oData['system'] = sSystem;
oData['id'] = iId;
var oComments = $(oLink).parents('.' + this.sClassItem + ':first').find('.' + this.sClassItemComments);
if(oComments.children().length > 0) {
oComments.bx_anim('toggle', this._sAnimationEffect, this._iAnimationSpeed);
return;
}
if(oLink)
this.loadingInItem(oLink, true);
jQuery.get (
this._sActionsUrl + 'get_comments',
oData,
function(oData) {
if(oLink)
$this.loadingInItem(oLink, false);
if(!oData.content)
return;
oComments.html($(oData.content).hide()).children(':hidden').bxTime().bx_anim('show', $this._sAnimationEffect, $this._iAnimationSpeed);
},
'json'
);
};
BxTimelineView.prototype._getPosts = function(oElement, sAction) {
var $this = this;
var oView = $(this.sIdView);
switch(sAction) {
case 'page':
this.loadingInButton(oElement, true);
break;
default:
this.loadingInBlock(oElement, true);
break;
}
jQuery.get(
this._sActionsUrl + 'get_posts/',
this._getDefaultData(),
function(oData) {
if(oData && oData.items != undefined) {
var sItems = $.trim(oData.items);
switch(sAction) {
case 'page':
$this.loadingInButton(oElement, false);
$this.appendMasonry($(sItems).bxTime());
break;
default:
$this.loadingInBlock(oElement, false);
oView.find('.' + $this.sClassItems).bx_anim('hide', $this._sAnimationEffect, $this._iAnimationSpeed, function() {
$(this).html(sItems).show().bxTime();
if($this.isMasonryEmpty()) {
$this.destroyMasonry();
return;
}
if($this.isMasonry())
$this.reloadMasonry();
else
$this.initMasonry();
});
break;
}
}
if(oData && oData.load_more != undefined)
oView.find('.' + $this.sSP + '-load-more-holder').html($.trim(oData.load_more));
if(oData && oData.back != undefined)
oView.find('.' + $this.sSP + '-back-holder').html($.trim(oData.back));
},
'json'
);
}; | return;
}
$this.destroyMasonry();
oView.find('.bx-tl-load-more').hide(); | random_line_split |
view.js | function | (oOptions) {
this._sActionsUri = oOptions.sActionUri;
this._sActionsUrl = oOptions.sActionUrl;
this._sObjName = oOptions.sObjName == undefined ? 'oTimelineView' : oOptions.sObjName;
this._iOwnerId = oOptions.iOwnerId == undefined ? 0 : oOptions.iOwnerId;
this._sAnimationEffect = oOptions.sAnimationEffect == undefined ? 'slide' : oOptions.sAnimationEffect;
this._iAnimationSpeed = oOptions.iAnimationSpeed == undefined ? 'slow' : oOptions.iAnimationSpeed;
this._aHtmlIds = oOptions.aHtmlIds == undefined ? {} : oOptions.aHtmlIds;
this._oRequestParams = oOptions.oRequestParams == undefined ? {} : oOptions.oRequestParams;
var $this = this;
$(document).ready(function() {
$this.initMasonry();
$('.bx-tl-item').resize(function() {
$this.reloadMasonry();
});
$('img.bx-tl-item-image').load(function() {
$this.reloadMasonry();
});
});
}
BxTimelineView.prototype = new BxTimelineMain();
BxTimelineView.prototype.changePage = function(oElement, iStart, iPerPage) {
this._oRequestParams.start = iStart;
this._oRequestParams.per_page = iPerPage;
this._getPosts(oElement, 'page');
};
BxTimelineView.prototype.changeFilter = function(oLink) {
var sId = $(oLink).attr('id');
this._oRequestParams.start = 0;
this._oRequestParams.filter = sId.substr(sId.lastIndexOf('-') + 1, sId.length);
this._getPosts(oLink, 'filter');
};
BxTimelineView.prototype.changeTimeline = function(oLink, iYear) {
this._oRequestParams.start = 0;
this._oRequestParams.timeline = iYear;
this._getPosts(oLink, 'timeline');
};
BxTimelineView.prototype.deletePost = function(oLink, iId) {
var $this = this;
var oView = $(this.sIdView);
var oData = this._getDefaultData();
oData['id'] = iId;
this.loadingInBlock(oLink, true);
$.post(
this._sActionsUrl + 'delete/',
oData,
function(oData) {
$this.loadingInBlock(oLink, false);
if(oData && oData.msg != undefined)
alert(oData.msg);
if(oData && oData.code == 0)
$(oLink).parents('.bx-popup-applied:first:visible').dolPopupHide();
$($this.sIdItem + oData.id).bx_anim('hide', $this._sAnimationEffect, $this._iAnimationSpeed, function() {
$(this).remove();
if(oView.find('.bx-tl-item').length != 0) {
$this.reloadMasonry();
return;
}
$this.destroyMasonry();
oView.find('.bx-tl-load-more').hide();
oView.find('.bx-tl-empty').show();
});
},
'json'
);
};
BxTimelineView.prototype.showMoreContent = function(oLink) {
$(oLink).parent('span').next('span').show().prev('span').remove();
this.reloadMasonry();
};
BxTimelineView.prototype.showPhoto = function(oLink, sUrl) {
$('#' + this._aHtmlIds['photo_popup']).dolPopupImage(sUrl, $(oLink).parent());
};
BxTimelineView.prototype.commentItem = function(oLink, sSystem, iId) {
var $this = this;
var oData = this._getDefaultData();
oData['system'] = sSystem;
oData['id'] = iId;
var oComments = $(oLink).parents('.' + this.sClassItem + ':first').find('.' + this.sClassItemComments);
if(oComments.children().length > 0) {
oComments.bx_anim('toggle', this._sAnimationEffect, this._iAnimationSpeed);
return;
}
if(oLink)
this.loadingInItem(oLink, true);
jQuery.get (
this._sActionsUrl + 'get_comments',
oData,
function(oData) {
if(oLink)
$this.loadingInItem(oLink, false);
if(!oData.content)
return;
oComments.html($(oData.content).hide()).children(':hidden').bxTime().bx_anim('show', $this._sAnimationEffect, $this._iAnimationSpeed);
},
'json'
);
};
BxTimelineView.prototype._getPosts = function(oElement, sAction) {
var $this = this;
var oView = $(this.sIdView);
switch(sAction) {
case 'page':
this.loadingInButton(oElement, true);
break;
default:
this.loadingInBlock(oElement, true);
break;
}
jQuery.get(
this._sActionsUrl + 'get_posts/',
this._getDefaultData(),
function(oData) {
if(oData && oData.items != undefined) {
var sItems = $.trim(oData.items);
switch(sAction) {
case 'page':
$this.loadingInButton(oElement, false);
$this.appendMasonry($(sItems).bxTime());
break;
default:
$this.loadingInBlock(oElement, false);
oView.find('.' + $this.sClassItems).bx_anim('hide', $this._sAnimationEffect, $this._iAnimationSpeed, function() {
$(this).html(sItems).show().bxTime();
if($this.isMasonryEmpty()) {
$this.destroyMasonry();
return;
}
if($this.isMasonry())
$this.reloadMasonry();
else
$this.initMasonry();
});
break;
}
}
if(oData && oData.load_more != undefined)
oView.find('.' + $this.sSP + '-load-more-holder').html($.trim(oData.load_more));
if(oData && oData.back != undefined)
oView.find('.' + $this.sSP + '-back-holder').html($.trim(oData.back));
},
'json'
);
};
| BxTimelineView | identifier_name |
akPu3CaloJetSequence15_cff.py | import FWCore.ParameterSet.Config as cms
from HeavyIonsAnalysis.JetAnalysis.jets.akPu3CaloJetSequence_PbPb_mc_cff import *
#PU jets: type 15
akPu3Calomatch15 = akPu3Calomatch.clone(src = cms.InputTag("akPu3CaloJets15"))
akPu3Caloparton15 = akPu3Caloparton.clone(src = cms.InputTag("akPu3CaloJets15"))
akPu3Calocorr15 = akPu3Calocorr.clone(src = cms.InputTag("akPu3CaloJets15"))
akPu3CalopatJets15 = akPu3CalopatJets.clone(jetSource = cms.InputTag("akPu3CaloJets15"),
jetCorrFactorsSource = cms.VInputTag(cms.InputTag("akPu3Calocorr15")),
genJetMatch = cms.InputTag("akPu3Calomatch15"), | akPu3CaloJetSequence15 = cms.Sequence(akPu3Calomatch15
*
akPu3Caloparton15
*
akPu3Calocorr15
*
akPu3CalopatJets15
*
akPu3CaloJetAnalyzer15
) | genPartonMatch = cms.InputTag("akPu3Caloparton15"),
)
akPu3CaloJetAnalyzer15 = akPu3CaloJetAnalyzer.clone(jetTag = cms.InputTag("akPu3CalopatJets15"), doSubEvent = cms.untracked.bool(True) ) | random_line_split |
action_registry_test.py | # coding: utf-8
#
# Copyright 2018 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for methods in the action registry."""
from __future__ import absolute_import # pylint: disable=import-only-modules
from __future__ import unicode_literals # pylint: disable=import-only-modules
from core.domain import action_registry
from core.tests import test_utils
class ActionRegistryUnitTests(test_utils.GenericTestBase):
"""Test for the action registry."""
def | (self):
"""Do some sanity checks on the action registry."""
self.assertEqual(
len(action_registry.Registry.get_all_actions()), 3)
| test_action_registry | identifier_name |
action_registry_test.py | # coding: utf-8
#
# Copyright 2018 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for methods in the action registry."""
from __future__ import absolute_import # pylint: disable=import-only-modules
from __future__ import unicode_literals # pylint: disable=import-only-modules
from core.domain import action_registry
from core.tests import test_utils
class ActionRegistryUnitTests(test_utils.GenericTestBase):
"""Test for the action registry.""" |
def test_action_registry(self):
"""Do some sanity checks on the action registry."""
self.assertEqual(
len(action_registry.Registry.get_all_actions()), 3) | random_line_split | |
action_registry_test.py | # coding: utf-8
#
# Copyright 2018 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for methods in the action registry."""
from __future__ import absolute_import # pylint: disable=import-only-modules
from __future__ import unicode_literals # pylint: disable=import-only-modules
from core.domain import action_registry
from core.tests import test_utils
class ActionRegistryUnitTests(test_utils.GenericTestBase):
| """Test for the action registry."""
def test_action_registry(self):
"""Do some sanity checks on the action registry."""
self.assertEqual(
len(action_registry.Registry.get_all_actions()), 3) | identifier_body | |
app.module.ts | import { NgModule, ApplicationRef } from '@angular/core';
import { BrowserModule } from '@angular/platform-browser';
import { FormsModule } from '@angular/forms';
import { HttpModule } from '@angular/http';
import { RouterModule, PreloadAllModules } from '@angular/router';
import { removeNgStyles, createNewHosts, createInputTransfer } from '@angularclass/hmr';
/*
* Platform and Environment providers/directives/pipes
*/
import { ENV_PROVIDERS } from './environment';
import { ROUTES } from './app.routes';
// App is our top level component
import { AppComponent } from './app.component';
import { APP_RESOLVER_PROVIDERS } from './app.resolver';
import { AppState, InternalStateType } from './app.service';
import { HomeComponent } from './home';
import { QuizComponent } from './quiz';
import { StatsComponent } from './stats';
import { AddNewWordComponent } from './home/add-new-word';
import { NoContentComponent } from './no-content';
import { QuizFormComponent } from './quiz/quiz-form';
import { WordDetailComponent } from './home/worddetail';
import { WordFilterComponent } from './home/word-filter';
import { WordlistComponent } from './home/wordlist';
import { XLarge } from './home/x-large';
// Application wide providers
const APP_PROVIDERS = [
...APP_RESOLVER_PROVIDERS,
AppState
];
type StoreType = {
state: InternalStateType,
restoreInputValues: () => void,
disposeOldHosts: () => void
};
/**
* `AppModule` is the main entry point into Angular2's bootstraping process
*/
@NgModule({
bootstrap: [ AppComponent ],
declarations: [
AddNewWordComponent,
AppComponent,
HomeComponent,
StatsComponent,
QuizComponent,
QuizFormComponent,
NoContentComponent,
WordDetailComponent,
WordFilterComponent,
WordlistComponent,
XLarge
],
imports: [ // import Angular's modules
BrowserModule,
FormsModule,
HttpModule,
RouterModule.forRoot(ROUTES, { useHash: true, preloadingStrategy: PreloadAllModules })
],
providers: [ // expose our Services and Providers into Angular's dependency injection
ENV_PROVIDERS,
APP_PROVIDERS
]
})
export class AppModule {
constructor(public appRef: ApplicationRef, public appState: AppState) {
}
hmrOnInit(store: StoreType) {
if (!store || !store.state) return;
console.log('HMR store', JSON.stringify(store, null, 2));
// set state
this.appState._state = store.state;
// set input values
if ('restoreInputValues' in store) {
let restoreInputValues = store.restoreInputValues;
setTimeout(restoreInputValues);
}
this.appRef.tick();
delete store.state;
delete store.restoreInputValues;
}
| (store: StoreType) {
const cmpLocation = this.appRef.components.map(cmp => cmp.location.nativeElement);
// save state
const state = this.appState._state;
store.state = state;
// recreate root elements
store.disposeOldHosts = createNewHosts(cmpLocation);
// save input values
store.restoreInputValues = createInputTransfer();
// remove styles
removeNgStyles();
}
hmrAfterDestroy(store: StoreType) {
// display new elements
store.disposeOldHosts();
delete store.disposeOldHosts;
}
}
| hmrOnDestroy | identifier_name |
app.module.ts | import { NgModule, ApplicationRef } from '@angular/core';
import { BrowserModule } from '@angular/platform-browser';
import { FormsModule } from '@angular/forms';
import { HttpModule } from '@angular/http';
import { RouterModule, PreloadAllModules } from '@angular/router';
import { removeNgStyles, createNewHosts, createInputTransfer } from '@angularclass/hmr';
/*
* Platform and Environment providers/directives/pipes
*/
import { ENV_PROVIDERS } from './environment';
import { ROUTES } from './app.routes';
// App is our top level component
import { AppComponent } from './app.component';
import { APP_RESOLVER_PROVIDERS } from './app.resolver';
import { AppState, InternalStateType } from './app.service';
import { HomeComponent } from './home';
import { QuizComponent } from './quiz';
import { StatsComponent } from './stats';
import { AddNewWordComponent } from './home/add-new-word';
import { NoContentComponent } from './no-content';
import { QuizFormComponent } from './quiz/quiz-form';
import { WordDetailComponent } from './home/worddetail';
import { WordFilterComponent } from './home/word-filter';
import { WordlistComponent } from './home/wordlist';
import { XLarge } from './home/x-large';
// Application wide providers
const APP_PROVIDERS = [
...APP_RESOLVER_PROVIDERS, | state: InternalStateType,
restoreInputValues: () => void,
disposeOldHosts: () => void
};
/**
* `AppModule` is the main entry point into Angular2's bootstraping process
*/
@NgModule({
bootstrap: [ AppComponent ],
declarations: [
AddNewWordComponent,
AppComponent,
HomeComponent,
StatsComponent,
QuizComponent,
QuizFormComponent,
NoContentComponent,
WordDetailComponent,
WordFilterComponent,
WordlistComponent,
XLarge
],
imports: [ // import Angular's modules
BrowserModule,
FormsModule,
HttpModule,
RouterModule.forRoot(ROUTES, { useHash: true, preloadingStrategy: PreloadAllModules })
],
providers: [ // expose our Services and Providers into Angular's dependency injection
ENV_PROVIDERS,
APP_PROVIDERS
]
})
export class AppModule {
constructor(public appRef: ApplicationRef, public appState: AppState) {
}
hmrOnInit(store: StoreType) {
if (!store || !store.state) return;
console.log('HMR store', JSON.stringify(store, null, 2));
// set state
this.appState._state = store.state;
// set input values
if ('restoreInputValues' in store) {
let restoreInputValues = store.restoreInputValues;
setTimeout(restoreInputValues);
}
this.appRef.tick();
delete store.state;
delete store.restoreInputValues;
}
hmrOnDestroy(store: StoreType) {
const cmpLocation = this.appRef.components.map(cmp => cmp.location.nativeElement);
// save state
const state = this.appState._state;
store.state = state;
// recreate root elements
store.disposeOldHosts = createNewHosts(cmpLocation);
// save input values
store.restoreInputValues = createInputTransfer();
// remove styles
removeNgStyles();
}
hmrAfterDestroy(store: StoreType) {
// display new elements
store.disposeOldHosts();
delete store.disposeOldHosts;
}
} | AppState
];
type StoreType = { | random_line_split |
app.module.ts | import { NgModule, ApplicationRef } from '@angular/core';
import { BrowserModule } from '@angular/platform-browser';
import { FormsModule } from '@angular/forms';
import { HttpModule } from '@angular/http';
import { RouterModule, PreloadAllModules } from '@angular/router';
import { removeNgStyles, createNewHosts, createInputTransfer } from '@angularclass/hmr';
/*
* Platform and Environment providers/directives/pipes
*/
import { ENV_PROVIDERS } from './environment';
import { ROUTES } from './app.routes';
// App is our top level component
import { AppComponent } from './app.component';
import { APP_RESOLVER_PROVIDERS } from './app.resolver';
import { AppState, InternalStateType } from './app.service';
import { HomeComponent } from './home';
import { QuizComponent } from './quiz';
import { StatsComponent } from './stats';
import { AddNewWordComponent } from './home/add-new-word';
import { NoContentComponent } from './no-content';
import { QuizFormComponent } from './quiz/quiz-form';
import { WordDetailComponent } from './home/worddetail';
import { WordFilterComponent } from './home/word-filter';
import { WordlistComponent } from './home/wordlist';
import { XLarge } from './home/x-large';
// Application wide providers
const APP_PROVIDERS = [
...APP_RESOLVER_PROVIDERS,
AppState
];
type StoreType = {
state: InternalStateType,
restoreInputValues: () => void,
disposeOldHosts: () => void
};
/**
* `AppModule` is the main entry point into Angular2's bootstraping process
*/
@NgModule({
bootstrap: [ AppComponent ],
declarations: [
AddNewWordComponent,
AppComponent,
HomeComponent,
StatsComponent,
QuizComponent,
QuizFormComponent,
NoContentComponent,
WordDetailComponent,
WordFilterComponent,
WordlistComponent,
XLarge
],
imports: [ // import Angular's modules
BrowserModule,
FormsModule,
HttpModule,
RouterModule.forRoot(ROUTES, { useHash: true, preloadingStrategy: PreloadAllModules })
],
providers: [ // expose our Services and Providers into Angular's dependency injection
ENV_PROVIDERS,
APP_PROVIDERS
]
})
export class AppModule {
constructor(public appRef: ApplicationRef, public appState: AppState) {
}
hmrOnInit(store: StoreType) {
if (!store || !store.state) return;
console.log('HMR store', JSON.stringify(store, null, 2));
// set state
this.appState._state = store.state;
// set input values
if ('restoreInputValues' in store) |
this.appRef.tick();
delete store.state;
delete store.restoreInputValues;
}
hmrOnDestroy(store: StoreType) {
const cmpLocation = this.appRef.components.map(cmp => cmp.location.nativeElement);
// save state
const state = this.appState._state;
store.state = state;
// recreate root elements
store.disposeOldHosts = createNewHosts(cmpLocation);
// save input values
store.restoreInputValues = createInputTransfer();
// remove styles
removeNgStyles();
}
hmrAfterDestroy(store: StoreType) {
// display new elements
store.disposeOldHosts();
delete store.disposeOldHosts;
}
}
| {
let restoreInputValues = store.restoreInputValues;
setTimeout(restoreInputValues);
} | conditional_block |
staging.py | """Production settings and globals."""
from base import *
########## HOST CONFIGURATION
# See: https://docs.djangoproject.com/en/1.5/releases/1.5/#allowed-hosts-required-in-production
MAIN_HOST = ['openbilanci.staging.deppsviluppo.org',]
# Allowed hosts expansion: needed for servizi ai Comuni
HOSTS_COMUNI = [
'novara.comuni.deppsviluppo.org',
'rapallo.comuni.deppsviluppo.org',
'castiglionedellestiviere.comuni.deppsviluppo.org',
'firenze.comuni.deppsviluppo.org',
'terni.comuni.deppsviluppo.org'
]
ALLOWED_HOSTS += MAIN_HOST + HOSTS_COMUNI
########## END HOST CONFIGURATION
########## EMAIL CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-backend
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
########## END EMAIL CONFIGURATION
########## TOOLBAR CONFIGURATION
# See: https://github.com/django-debug-toolbar/django-debug-toolbar#installation
INSTALLED_APPS += (
'debug_toolbar',
)
# See: https://github.com/django-debug-toolbar/django-debug-toolbar#installation
INTERNAL_IPS = ('176.31.74.29',)
# See: https://github.com/django-debug-toolbar/django-debug-toolbar#installation
MIDDLEWARE_CLASSES = (
'debug_toolbar.middleware.DebugToolbarMiddleware',
) + MIDDLEWARE_CLASSES
# See: https://github.com/django-debug-toolbar/django-debug-toolbar#installation
DEBUG_TOOLBAR_CONFIG = {
'INTERCEPT_REDIRECTS': False,
'SHOW_TEMPLATE_CONTEXT': True,
}
def show_toolbar(request):
print("IP Address for debug-toolbar: " + request.META['REMOTE_ADDR'])
return True
SHOW_TOOLBAR_CALLBACK = show_toolbar
DEBUG_TOOLBAR_PATCH_SETTINGS=False
########## END TOOLBAR CONFIGURATION
|
PATH_PREVENTIVI = BILANCI_PATH+"/%s/%s/Preventivo/%s.html"
PATH_CONSUNTIVI = BILANCI_PATH+"/%s/%s/Consuntivo/%s.html"
BILANCI_RAW_DB = 'bilanci_raw' |
BILANCI_PATH = "/home/open_bilanci/dati/bilanci_subset"
OUTPUT_FOLDER = '../scraper_project/scraper/output/'
LISTA_COMUNI = 'listacomuni.csv'
LISTA_COMUNI_PATH = OUTPUT_FOLDER + LISTA_COMUNI | random_line_split |
staging.py | """Production settings and globals."""
from base import *
########## HOST CONFIGURATION
# See: https://docs.djangoproject.com/en/1.5/releases/1.5/#allowed-hosts-required-in-production
MAIN_HOST = ['openbilanci.staging.deppsviluppo.org',]
# Allowed hosts expansion: needed for servizi ai Comuni
HOSTS_COMUNI = [
'novara.comuni.deppsviluppo.org',
'rapallo.comuni.deppsviluppo.org',
'castiglionedellestiviere.comuni.deppsviluppo.org',
'firenze.comuni.deppsviluppo.org',
'terni.comuni.deppsviluppo.org'
]
ALLOWED_HOSTS += MAIN_HOST + HOSTS_COMUNI
########## END HOST CONFIGURATION
########## EMAIL CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-backend
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
########## END EMAIL CONFIGURATION
########## TOOLBAR CONFIGURATION
# See: https://github.com/django-debug-toolbar/django-debug-toolbar#installation
INSTALLED_APPS += (
'debug_toolbar',
)
# See: https://github.com/django-debug-toolbar/django-debug-toolbar#installation
INTERNAL_IPS = ('176.31.74.29',)
# See: https://github.com/django-debug-toolbar/django-debug-toolbar#installation
MIDDLEWARE_CLASSES = (
'debug_toolbar.middleware.DebugToolbarMiddleware',
) + MIDDLEWARE_CLASSES
# See: https://github.com/django-debug-toolbar/django-debug-toolbar#installation
DEBUG_TOOLBAR_CONFIG = {
'INTERCEPT_REDIRECTS': False,
'SHOW_TEMPLATE_CONTEXT': True,
}
def show_toolbar(request):
|
SHOW_TOOLBAR_CALLBACK = show_toolbar
DEBUG_TOOLBAR_PATCH_SETTINGS=False
########## END TOOLBAR CONFIGURATION
BILANCI_PATH = "/home/open_bilanci/dati/bilanci_subset"
OUTPUT_FOLDER = '../scraper_project/scraper/output/'
LISTA_COMUNI = 'listacomuni.csv'
LISTA_COMUNI_PATH = OUTPUT_FOLDER + LISTA_COMUNI
PATH_PREVENTIVI = BILANCI_PATH+"/%s/%s/Preventivo/%s.html"
PATH_CONSUNTIVI = BILANCI_PATH+"/%s/%s/Consuntivo/%s.html"
BILANCI_RAW_DB = 'bilanci_raw'
| print("IP Address for debug-toolbar: " + request.META['REMOTE_ADDR'])
return True | identifier_body |
staging.py | """Production settings and globals."""
from base import *
########## HOST CONFIGURATION
# See: https://docs.djangoproject.com/en/1.5/releases/1.5/#allowed-hosts-required-in-production
MAIN_HOST = ['openbilanci.staging.deppsviluppo.org',]
# Allowed hosts expansion: needed for servizi ai Comuni
HOSTS_COMUNI = [
'novara.comuni.deppsviluppo.org',
'rapallo.comuni.deppsviluppo.org',
'castiglionedellestiviere.comuni.deppsviluppo.org',
'firenze.comuni.deppsviluppo.org',
'terni.comuni.deppsviluppo.org'
]
ALLOWED_HOSTS += MAIN_HOST + HOSTS_COMUNI
########## END HOST CONFIGURATION
########## EMAIL CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-backend
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
########## END EMAIL CONFIGURATION
########## TOOLBAR CONFIGURATION
# See: https://github.com/django-debug-toolbar/django-debug-toolbar#installation
INSTALLED_APPS += (
'debug_toolbar',
)
# See: https://github.com/django-debug-toolbar/django-debug-toolbar#installation
INTERNAL_IPS = ('176.31.74.29',)
# See: https://github.com/django-debug-toolbar/django-debug-toolbar#installation
MIDDLEWARE_CLASSES = (
'debug_toolbar.middleware.DebugToolbarMiddleware',
) + MIDDLEWARE_CLASSES
# See: https://github.com/django-debug-toolbar/django-debug-toolbar#installation
DEBUG_TOOLBAR_CONFIG = {
'INTERCEPT_REDIRECTS': False,
'SHOW_TEMPLATE_CONTEXT': True,
}
def | (request):
print("IP Address for debug-toolbar: " + request.META['REMOTE_ADDR'])
return True
SHOW_TOOLBAR_CALLBACK = show_toolbar
DEBUG_TOOLBAR_PATCH_SETTINGS=False
########## END TOOLBAR CONFIGURATION
BILANCI_PATH = "/home/open_bilanci/dati/bilanci_subset"
OUTPUT_FOLDER = '../scraper_project/scraper/output/'
LISTA_COMUNI = 'listacomuni.csv'
LISTA_COMUNI_PATH = OUTPUT_FOLDER + LISTA_COMUNI
PATH_PREVENTIVI = BILANCI_PATH+"/%s/%s/Preventivo/%s.html"
PATH_CONSUNTIVI = BILANCI_PATH+"/%s/%s/Consuntivo/%s.html"
BILANCI_RAW_DB = 'bilanci_raw'
| show_toolbar | identifier_name |
tooltip.ts | export let Tooltip = {
show(element: any, text: string) {
this.element = element;
if(this.tooltipVisible) {
// Hide any visible tooltips
this.hide();
}
let tooltip = this.getTooltip();
// element.parentNode.insertBefore(tooltip, element.nextSibling);
document.body.appendChild(tooltip);
tooltip.innerText = text;
this.addTooltipStyle(element, tooltip);
this.onMouseUpHandler = (e: any) => {
let target = e.target;
if(this.tooltipVisible && !target.classList.contains('tooltip')) {
this.hide();
}
}
this.onPageScrollHandler = () => this.addTooltipStyle(element, tooltip);
document.addEventListener('mouseup', this.onMouseUpHandler);
document.addEventListener('scroll', this.onPageScrollHandler);
this.tooltipVisible = true;
},
hide() {
document.body.removeChild(this.getTooltip());
this.tooltipVisible = false;
this.tooltip = null;
document.removeEventListener('mouseup', this.onMouseUpHandler);
document.removeEventListener('scroll', this.onPageScrollHandler);
},
getTooltip() {
if(!this.tooltip) {
let holder = document.createElement('span');
holder.classList.add('tooltip');
this.tooltip = holder;
}
return this.tooltip;
},
addTooltipStyle(element: any, tooltip: any) {
let offset = 6;
let tooltipWidth = tooltip.offsetWidth;
let tooltipHeight = tooltip.offsetHeight;
let elementPosition = element.getBoundingClientRect();
let elementWidth = element.offsetWidth;
let elementHeight = element.offsetHeight;
let pageScroll = document.documentElement.scrollTop || document.body.scrollTop;
tooltip.style.position = 'absolute';
tooltip.style.left = (elementPosition.left + (elementWidth / 2) - (tooltipWidth / 2)) + 'px';
if(tooltipHeight > elementPosition.top) | else {
tooltip.style.top = pageScroll + (elementPosition.top - (tooltipHeight + offset)) + 'px';
tooltip.classList.remove('bottom');
tooltip.classList.add('top');
}
}
}; | {
tooltip.style.top = pageScroll + (elementPosition.top + (elementHeight + offset)) + 'px';
tooltip.classList.remove('top');
tooltip.classList.add('bottom');
} | conditional_block |
tooltip.ts | export let Tooltip = {
show(element: any, text: string) {
this.element = element;
if(this.tooltipVisible) {
// Hide any visible tooltips
this.hide();
}
let tooltip = this.getTooltip();
// element.parentNode.insertBefore(tooltip, element.nextSibling);
document.body.appendChild(tooltip);
tooltip.innerText = text;
this.addTooltipStyle(element, tooltip);
this.onMouseUpHandler = (e: any) => {
let target = e.target;
if(this.tooltipVisible && !target.classList.contains('tooltip')) {
this.hide();
}
}
this.onPageScrollHandler = () => this.addTooltipStyle(element, tooltip);
document.addEventListener('mouseup', this.onMouseUpHandler);
document.addEventListener('scroll', this.onPageScrollHandler);
this.tooltipVisible = true;
},
hide() | ,
getTooltip() {
if(!this.tooltip) {
let holder = document.createElement('span');
holder.classList.add('tooltip');
this.tooltip = holder;
}
return this.tooltip;
},
addTooltipStyle(element: any, tooltip: any) {
let offset = 6;
let tooltipWidth = tooltip.offsetWidth;
let tooltipHeight = tooltip.offsetHeight;
let elementPosition = element.getBoundingClientRect();
let elementWidth = element.offsetWidth;
let elementHeight = element.offsetHeight;
let pageScroll = document.documentElement.scrollTop || document.body.scrollTop;
tooltip.style.position = 'absolute';
tooltip.style.left = (elementPosition.left + (elementWidth / 2) - (tooltipWidth / 2)) + 'px';
if(tooltipHeight > elementPosition.top) {
tooltip.style.top = pageScroll + (elementPosition.top + (elementHeight + offset)) + 'px';
tooltip.classList.remove('top');
tooltip.classList.add('bottom');
} else {
tooltip.style.top = pageScroll + (elementPosition.top - (tooltipHeight + offset)) + 'px';
tooltip.classList.remove('bottom');
tooltip.classList.add('top');
}
}
}; | {
document.body.removeChild(this.getTooltip());
this.tooltipVisible = false;
this.tooltip = null;
document.removeEventListener('mouseup', this.onMouseUpHandler);
document.removeEventListener('scroll', this.onPageScrollHandler);
} | identifier_body |
tooltip.ts | export let Tooltip = {
show(element: any, text: string) {
this.element = element;
if(this.tooltipVisible) {
// Hide any visible tooltips
this.hide();
}
let tooltip = this.getTooltip();
// element.parentNode.insertBefore(tooltip, element.nextSibling);
document.body.appendChild(tooltip);
tooltip.innerText = text;
this.addTooltipStyle(element, tooltip);
this.onMouseUpHandler = (e: any) => {
let target = e.target;
if(this.tooltipVisible && !target.classList.contains('tooltip')) {
this.hide();
}
}
this.onPageScrollHandler = () => this.addTooltipStyle(element, tooltip);
document.addEventListener('mouseup', this.onMouseUpHandler);
document.addEventListener('scroll', this.onPageScrollHandler);
this.tooltipVisible = true;
},
hide() {
document.body.removeChild(this.getTooltip());
this.tooltipVisible = false;
this.tooltip = null;
document.removeEventListener('mouseup', this.onMouseUpHandler);
document.removeEventListener('scroll', this.onPageScrollHandler);
},
| () {
if(!this.tooltip) {
let holder = document.createElement('span');
holder.classList.add('tooltip');
this.tooltip = holder;
}
return this.tooltip;
},
addTooltipStyle(element: any, tooltip: any) {
let offset = 6;
let tooltipWidth = tooltip.offsetWidth;
let tooltipHeight = tooltip.offsetHeight;
let elementPosition = element.getBoundingClientRect();
let elementWidth = element.offsetWidth;
let elementHeight = element.offsetHeight;
let pageScroll = document.documentElement.scrollTop || document.body.scrollTop;
tooltip.style.position = 'absolute';
tooltip.style.left = (elementPosition.left + (elementWidth / 2) - (tooltipWidth / 2)) + 'px';
if(tooltipHeight > elementPosition.top) {
tooltip.style.top = pageScroll + (elementPosition.top + (elementHeight + offset)) + 'px';
tooltip.classList.remove('top');
tooltip.classList.add('bottom');
} else {
tooltip.style.top = pageScroll + (elementPosition.top - (tooltipHeight + offset)) + 'px';
tooltip.classList.remove('bottom');
tooltip.classList.add('top');
}
}
}; | getTooltip | identifier_name |
tooltip.ts | export let Tooltip = {
show(element: any, text: string) {
this.element = element;
if(this.tooltipVisible) {
// Hide any visible tooltips
this.hide();
}
let tooltip = this.getTooltip();
// element.parentNode.insertBefore(tooltip, element.nextSibling);
document.body.appendChild(tooltip);
tooltip.innerText = text;
this.addTooltipStyle(element, tooltip);
this.onMouseUpHandler = (e: any) => {
let target = e.target;
if(this.tooltipVisible && !target.classList.contains('tooltip')) {
this.hide();
}
}
this.onPageScrollHandler = () => this.addTooltipStyle(element, tooltip);
document.addEventListener('mouseup', this.onMouseUpHandler);
document.addEventListener('scroll', this.onPageScrollHandler);
this.tooltipVisible = true;
},
hide() {
document.body.removeChild(this.getTooltip());
this.tooltipVisible = false;
this.tooltip = null;
document.removeEventListener('mouseup', this.onMouseUpHandler);
document.removeEventListener('scroll', this.onPageScrollHandler);
},
| holder.classList.add('tooltip');
this.tooltip = holder;
}
return this.tooltip;
},
addTooltipStyle(element: any, tooltip: any) {
let offset = 6;
let tooltipWidth = tooltip.offsetWidth;
let tooltipHeight = tooltip.offsetHeight;
let elementPosition = element.getBoundingClientRect();
let elementWidth = element.offsetWidth;
let elementHeight = element.offsetHeight;
let pageScroll = document.documentElement.scrollTop || document.body.scrollTop;
tooltip.style.position = 'absolute';
tooltip.style.left = (elementPosition.left + (elementWidth / 2) - (tooltipWidth / 2)) + 'px';
if(tooltipHeight > elementPosition.top) {
tooltip.style.top = pageScroll + (elementPosition.top + (elementHeight + offset)) + 'px';
tooltip.classList.remove('top');
tooltip.classList.add('bottom');
} else {
tooltip.style.top = pageScroll + (elementPosition.top - (tooltipHeight + offset)) + 'px';
tooltip.classList.remove('bottom');
tooltip.classList.add('top');
}
}
}; | getTooltip() {
if(!this.tooltip) {
let holder = document.createElement('span');
| random_line_split |
test_versions.js | import SagaTester from 'redux-saga-tester';
import * as versionsApi from 'amo/api/versions';
import versionsReducer, {
fetchVersions,
loadVersions,
} from 'amo/reducers/versions';
import versionsSaga from 'amo/sagas/versions';
import apiReducer from 'core/reducers/api';
import { createStubErrorHandler } from 'tests/unit/helpers';
import { dispatchClientMetadata, fakeVersion } from 'tests/unit/amo/helpers';
describe(__filename, () => {
const page = '2';
const slug = 'some-slug';
let clientData;
let errorHandler;
let mockApi;
let sagaTester;
beforeEach(() => {
errorHandler = createStubErrorHandler();
mockApi = sinon.mock(versionsApi);
clientData = dispatchClientMetadata();
sagaTester = new SagaTester({
initialState: clientData.state,
reducers: {
api: apiReducer,
versions: versionsReducer,
},
});
sagaTester.start(versionsSaga);
});
describe('fetchVersions', () => {
function _fetchVersions(params) |
it('calls the API to fetch versions', async () => {
const state = sagaTester.getState();
const versions = { results: [fakeVersion] };
mockApi
.expects('getVersions')
.withArgs({
api: state.api,
page,
slug,
})
.once()
.resolves(versions);
_fetchVersions({ page, slug });
const expectedAction = loadVersions({ slug, versions });
const loadAction = await sagaTester.waitFor(expectedAction.type);
expect(loadAction).toEqual(expectedAction);
mockApi.verify();
});
it('clears the error handler', async () => {
_fetchVersions({ page, slug });
const expectedAction = errorHandler.createClearingAction();
const action = await sagaTester.waitFor(expectedAction.type);
expect(action).toEqual(expectedAction);
});
it('dispatches an error', async () => {
const error = new Error('some API error maybe');
mockApi
.expects('getVersions')
.once()
.rejects(error);
_fetchVersions({ page, slug });
const expectedAction = errorHandler.createErrorAction(error);
const action = await sagaTester.waitFor(expectedAction.type);
expect(action).toEqual(expectedAction);
});
});
});
| {
sagaTester.dispatch(
fetchVersions({
errorHandlerId: errorHandler.id,
...params,
}),
);
} | identifier_body |
test_versions.js | import SagaTester from 'redux-saga-tester';
import * as versionsApi from 'amo/api/versions';
import versionsReducer, {
fetchVersions,
loadVersions,
} from 'amo/reducers/versions';
import versionsSaga from 'amo/sagas/versions';
import apiReducer from 'core/reducers/api';
import { createStubErrorHandler } from 'tests/unit/helpers';
import { dispatchClientMetadata, fakeVersion } from 'tests/unit/amo/helpers';
describe(__filename, () => {
const page = '2';
const slug = 'some-slug';
let clientData;
let errorHandler;
let mockApi;
let sagaTester;
beforeEach(() => {
errorHandler = createStubErrorHandler();
mockApi = sinon.mock(versionsApi);
clientData = dispatchClientMetadata();
sagaTester = new SagaTester({
initialState: clientData.state,
reducers: {
api: apiReducer,
versions: versionsReducer,
},
});
sagaTester.start(versionsSaga);
});
describe('fetchVersions', () => {
function | (params) {
sagaTester.dispatch(
fetchVersions({
errorHandlerId: errorHandler.id,
...params,
}),
);
}
it('calls the API to fetch versions', async () => {
const state = sagaTester.getState();
const versions = { results: [fakeVersion] };
mockApi
.expects('getVersions')
.withArgs({
api: state.api,
page,
slug,
})
.once()
.resolves(versions);
_fetchVersions({ page, slug });
const expectedAction = loadVersions({ slug, versions });
const loadAction = await sagaTester.waitFor(expectedAction.type);
expect(loadAction).toEqual(expectedAction);
mockApi.verify();
});
it('clears the error handler', async () => {
_fetchVersions({ page, slug });
const expectedAction = errorHandler.createClearingAction();
const action = await sagaTester.waitFor(expectedAction.type);
expect(action).toEqual(expectedAction);
});
it('dispatches an error', async () => {
const error = new Error('some API error maybe');
mockApi
.expects('getVersions')
.once()
.rejects(error);
_fetchVersions({ page, slug });
const expectedAction = errorHandler.createErrorAction(error);
const action = await sagaTester.waitFor(expectedAction.type);
expect(action).toEqual(expectedAction);
});
});
});
| _fetchVersions | identifier_name |
test_versions.js | import SagaTester from 'redux-saga-tester';
import * as versionsApi from 'amo/api/versions';
import versionsReducer, {
fetchVersions,
loadVersions,
} from 'amo/reducers/versions';
import versionsSaga from 'amo/sagas/versions';
import apiReducer from 'core/reducers/api';
import { createStubErrorHandler } from 'tests/unit/helpers';
import { dispatchClientMetadata, fakeVersion } from 'tests/unit/amo/helpers';
describe(__filename, () => {
const page = '2';
const slug = 'some-slug';
let clientData;
let errorHandler;
let mockApi;
let sagaTester;
beforeEach(() => {
errorHandler = createStubErrorHandler();
mockApi = sinon.mock(versionsApi);
clientData = dispatchClientMetadata();
sagaTester = new SagaTester({
initialState: clientData.state,
reducers: {
api: apiReducer,
versions: versionsReducer,
},
});
sagaTester.start(versionsSaga);
});
describe('fetchVersions', () => {
function _fetchVersions(params) {
sagaTester.dispatch(
fetchVersions({
errorHandlerId: errorHandler.id,
...params,
}),
);
}
it('calls the API to fetch versions', async () => {
const state = sagaTester.getState();
const versions = { results: [fakeVersion] }; | page,
slug,
})
.once()
.resolves(versions);
_fetchVersions({ page, slug });
const expectedAction = loadVersions({ slug, versions });
const loadAction = await sagaTester.waitFor(expectedAction.type);
expect(loadAction).toEqual(expectedAction);
mockApi.verify();
});
it('clears the error handler', async () => {
_fetchVersions({ page, slug });
const expectedAction = errorHandler.createClearingAction();
const action = await sagaTester.waitFor(expectedAction.type);
expect(action).toEqual(expectedAction);
});
it('dispatches an error', async () => {
const error = new Error('some API error maybe');
mockApi
.expects('getVersions')
.once()
.rejects(error);
_fetchVersions({ page, slug });
const expectedAction = errorHandler.createErrorAction(error);
const action = await sagaTester.waitFor(expectedAction.type);
expect(action).toEqual(expectedAction);
});
});
}); |
mockApi
.expects('getVersions')
.withArgs({
api: state.api, | random_line_split |
DataDocumentType.js | a,g);return a}); | // All material copyright ESRI, All Rights Reserved, unless otherwise specified.
// See http://js.arcgis.com/3.17/esri/copyright.txt for details.
//>>built
define("esri/dijit/metadata/types/gemini/base/DataDocumentType","dojo/_base/declare dojo/_base/lang dojo/has ./GeminiDocumentType ./DataRoot dojo/i18n!../../../nls/i18nGemini ../../../../../kernel".split(" "),function(a,c,d,e,f,b,g){a=a(e,{caption:b.documentTypes.data.caption,description:b.documentTypes.data.description,key:"gemini-iso-19115",isService:!1,metadataStandardName:"UK GEMINI",metadataStandardVersion:"2.2",newRootDescriptor:function(){return new f}});d("extend-esri")&&c.setObject("dijit.metadata.types.gemini.base.DataDocumentType", | random_line_split | |
fix_sensor_config_key.py | from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
from builtins import *
import emission.core.get_database as edb
def fix_key(check_field, new_key):
print("First entry for "+new_key+" is %s" % list(edb.get_timeseries_db().find(
{"metadata.key": "config/sensor_config",
check_field: {"$exists": True}}).sort(
"metadata/write_ts").limit(1)))
udb = edb.get_usercache_db()
tdb = edb.get_timeseries_db()
for i, entry in enumerate(edb.get_timeseries_db().find(
{"metadata.key": "config/sensor_config",
check_field: {"$exists": True}})):
entry["metadata"]["key"] = new_key
if i % 10000 == 0:
print(udb.insert(entry))
print(tdb.remove(entry["_id"]))
else:
|
fix_key("data.battery_status", "background/battery")
fix_key("data.latitude", "background/location")
fix_key("data.zzaEh", "background/motion_activity")
fix_key("data.currState", "statemachine/transition")
| udb.insert(entry)
tdb.remove(entry["_id"]) | conditional_block |
fix_sensor_config_key.py | from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
from builtins import *
import emission.core.get_database as edb
def | (check_field, new_key):
print("First entry for "+new_key+" is %s" % list(edb.get_timeseries_db().find(
{"metadata.key": "config/sensor_config",
check_field: {"$exists": True}}).sort(
"metadata/write_ts").limit(1)))
udb = edb.get_usercache_db()
tdb = edb.get_timeseries_db()
for i, entry in enumerate(edb.get_timeseries_db().find(
{"metadata.key": "config/sensor_config",
check_field: {"$exists": True}})):
entry["metadata"]["key"] = new_key
if i % 10000 == 0:
print(udb.insert(entry))
print(tdb.remove(entry["_id"]))
else:
udb.insert(entry)
tdb.remove(entry["_id"])
fix_key("data.battery_status", "background/battery")
fix_key("data.latitude", "background/location")
fix_key("data.zzaEh", "background/motion_activity")
fix_key("data.currState", "statemachine/transition")
| fix_key | identifier_name |
fix_sensor_config_key.py | from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
from builtins import *
import emission.core.get_database as edb
def fix_key(check_field, new_key):
print("First entry for "+new_key+" is %s" % list(edb.get_timeseries_db().find( | check_field: {"$exists": True}}).sort(
"metadata/write_ts").limit(1)))
udb = edb.get_usercache_db()
tdb = edb.get_timeseries_db()
for i, entry in enumerate(edb.get_timeseries_db().find(
{"metadata.key": "config/sensor_config",
check_field: {"$exists": True}})):
entry["metadata"]["key"] = new_key
if i % 10000 == 0:
print(udb.insert(entry))
print(tdb.remove(entry["_id"]))
else:
udb.insert(entry)
tdb.remove(entry["_id"])
fix_key("data.battery_status", "background/battery")
fix_key("data.latitude", "background/location")
fix_key("data.zzaEh", "background/motion_activity")
fix_key("data.currState", "statemachine/transition") | {"metadata.key": "config/sensor_config", | random_line_split |
fix_sensor_config_key.py | from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
from builtins import *
import emission.core.get_database as edb
def fix_key(check_field, new_key):
|
fix_key("data.battery_status", "background/battery")
fix_key("data.latitude", "background/location")
fix_key("data.zzaEh", "background/motion_activity")
fix_key("data.currState", "statemachine/transition")
| print("First entry for "+new_key+" is %s" % list(edb.get_timeseries_db().find(
{"metadata.key": "config/sensor_config",
check_field: {"$exists": True}}).sort(
"metadata/write_ts").limit(1)))
udb = edb.get_usercache_db()
tdb = edb.get_timeseries_db()
for i, entry in enumerate(edb.get_timeseries_db().find(
{"metadata.key": "config/sensor_config",
check_field: {"$exists": True}})):
entry["metadata"]["key"] = new_key
if i % 10000 == 0:
print(udb.insert(entry))
print(tdb.remove(entry["_id"]))
else:
udb.insert(entry)
tdb.remove(entry["_id"]) | identifier_body |
base.py | """
Base/mixin classes for the spatial backend database operations and the
`SpatialRefSys` model the backend.
"""
import re
from django.contrib.gis import gdal
from django.utils import six
from django.utils.encoding import python_2_unicode_compatible
class BaseSpatialOperations(object):
"""
This module holds the base `BaseSpatialBackend` object, which is
instantiated by each spatial database backend with the features
it has.
"""
distance_functions = {}
geometry_functions = {}
geometry_operators = {}
geography_operators = {}
geography_functions = {}
gis_terms = set()
truncate_params = {}
# Quick booleans for the type of this spatial backend, and
# an attribute for the spatial database version tuple (if applicable)
postgis = False
spatialite = False
mysql = False
oracle = False
spatial_version = None
# How the geometry column should be selected.
select = None
# Does the spatial database have a geometry or geography type?
geography = False
geometry = False
area = False
centroid = False
difference = False
distance = False
distance_sphere = False
distance_spheroid = False
envelope = False
force_rhr = False
mem_size = False
bounding_circle = False
num_geom = False
num_points = False
perimeter = False
perimeter3d = False
point_on_surface = False
polygonize = False
reverse = False
scale = False
snap_to_grid = False
sym_difference = False
transform = False
translate = False
union = False
# Aggregates
collect = False
extent = False
extent3d = False
make_line = False
unionagg = False
# Serialization
geohash = False
geojson = False
gml = False
kml = False
svg = False
# Constructors
from_text = False
from_wkb = False
# Default conversion functions for aggregates; will be overridden if implemented
# for the spatial backend.
def convert_extent(self, box):
raise NotImplementedError('Aggregate extent not implemented for this spatial backend.')
def convert_extent3d(self, box):
raise NotImplementedError('Aggregate 3D extent not implemented for this spatial backend.')
def convert_geom(self, geom_val, geom_field):
raise NotImplementedError('Aggregate method not implemented for this spatial backend.')
# For quoting column values, rather than columns.
def geo_quote_name(self, name):
return "'%s'" % name
# GeometryField operations
def geo_db_type(self, f):
"""
Returns the database column type for the geometry field on
the spatial backend.
"""
raise NotImplementedError('subclasses of BaseSpatialOperations must provide a geo_db_type() method')
def get_distance(self, f, value, lookup_type):
"""
Returns the distance parameters for the given geometry field,
lookup value, and lookup type.
"""
raise NotImplementedError('Distance operations not available on this spatial backend.')
def get_geom_placeholder(self, f, value):
"""
Returns the placeholder for the given geometry field with the given
value. Depending on the spatial backend, the placeholder may contain a
stored procedure call to the transformation function of the spatial
backend.
"""
raise NotImplementedError('subclasses of BaseSpatialOperations must provide a geo_db_placeholder() method')
def get_expression_column(self, evaluator):
"""
Helper method to return the quoted column string from the evaluator
for its expression.
"""
for expr, col_tup in evaluator.cols:
if expr is evaluator.expression:
return '%s.%s' % tuple(map(self.quote_name, col_tup))
raise Exception("Could not find the column for the expression.")
# Spatial SQL Construction
def spatial_aggregate_sql(self, agg):
raise NotImplementedError('Aggregate support not implemented for this spatial backend.')
def spatial_lookup_sql(self, lvalue, lookup_type, value, field):
raise NotImplementedError('subclasses of BaseSpatialOperations must a provide spatial_lookup_sql() method')
# Routines for getting the OGC-compliant models.
def geometry_columns(self):
raise NotImplementedError('subclasses of BaseSpatialOperations must a provide geometry_columns() method')
def spatial_ref_sys(self):
raise NotImplementedError('subclasses of BaseSpatialOperations must a provide spatial_ref_sys() method')
@python_2_unicode_compatible
class SpatialRefSysMixin(object):
"""
The SpatialRefSysMixin is a class used by the database-dependent
SpatialRefSys objects to reduce redundnant code.
"""
# For pulling out the spheroid from the spatial reference string. This
# regular expression is used only if the user does not have GDAL installed.
# TODO: Flattening not used in all ellipsoids, could also be a minor axis,
# or 'b' parameter.
spheroid_regex = re.compile(r'.+SPHEROID\[\"(?P<name>.+)\",(?P<major>\d+(\.\d+)?),(?P<flattening>\d{3}\.\d+),')
# For pulling out the units on platforms w/o GDAL installed.
# TODO: Figure out how to pull out angular units of projected coordinate system and
# fix for LOCAL_CS types. GDAL should be highly recommended for performing
# distance queries.
units_regex = re.compile(r'.+UNIT ?\["(?P<unit_name>[\w \'\(\)]+)", ?(?P<unit>[\d\.]+)(,AUTHORITY\["(?P<unit_auth_name>[\w \'\(\)]+)","(?P<unit_auth_val>\d+)"\])?\]([\w ]+)?(,AUTHORITY\["(?P<auth_name>[\w \'\(\)]+)","(?P<auth_val>\d+)"\])?\]$')
@property
def srs(self):
"""
Returns a GDAL SpatialReference object, if GDAL is installed.
"""
if gdal.HAS_GDAL:
# TODO: Is caching really necessary here? Is complexity worth it?
if hasattr(self, '_srs'):
# Returning a clone of the cached SpatialReference object.
return self._srs.clone()
else:
# Attempting to cache a SpatialReference object.
# Trying to get from WKT first.
try:
self._srs = gdal.SpatialReference(self.wkt)
return self.srs
except Exception as msg:
pass
try: |
raise Exception('Could not get OSR SpatialReference from WKT: %s\nError:\n%s' % (self.wkt, msg))
else:
raise Exception('GDAL is not installed.')
@property
def ellipsoid(self):
"""
Returns a tuple of the ellipsoid parameters:
(semimajor axis, semiminor axis, and inverse flattening).
"""
if gdal.HAS_GDAL:
return self.srs.ellipsoid
else:
m = self.spheroid_regex.match(self.wkt)
if m:
return (float(m.group('major')), float(m.group('flattening')))
else:
return None
@property
def name(self):
"Returns the projection name."
return self.srs.name
@property
def spheroid(self):
"Returns the spheroid name for this spatial reference."
return self.srs['spheroid']
@property
def datum(self):
"Returns the datum for this spatial reference."
return self.srs['datum']
@property
def projected(self):
"Is this Spatial Reference projected?"
if gdal.HAS_GDAL:
return self.srs.projected
else:
return self.wkt.startswith('PROJCS')
@property
def local(self):
"Is this Spatial Reference local?"
if gdal.HAS_GDAL:
return self.srs.local
else:
return self.wkt.startswith('LOCAL_CS')
@property
def geographic(self):
"Is this Spatial Reference geographic?"
if gdal.HAS_GDAL:
return self.srs.geographic
else:
return self.wkt.startswith('GEOGCS')
@property
def linear_name(self):
"Returns the linear units name."
if gdal.HAS_GDAL:
return self.srs.linear_name
elif self.geographic:
return None
else:
m = self.units_regex.match(self.wkt)
return m.group('unit_name')
@property
def linear_units(self):
"Returns the linear units."
if gdal.HAS_GDAL:
return self.srs.linear_units
elif self.geographic:
return None
else:
m = self.units_regex.match(self.wkt)
return m.group('unit')
@property
def angular_name(self):
"Returns the name of the angular units."
if gdal.HAS_GDAL:
return self.srs.angular_name
elif self.projected:
return None
else:
m = self.units_regex.match(self.wkt)
return m.group('unit_name')
@property
def angular_units(self):
"Returns the angular units."
if gdal.HAS_GDAL:
return self.srs.angular_units
elif self.projected:
return None
else:
m = self.units_regex.match(self.wkt)
return m.group('unit')
@property
def units(self):
"Returns a tuple of the units and the name."
if self.projected or self.local:
return (self.linear_units, self.linear_name)
elif self.geographic:
return (self.angular_units, self.angular_name)
else:
return (None, None)
@classmethod
def get_units(cls, wkt):
"""
Class method used by GeometryField on initialization to
retrive the units on the given WKT, without having to use
any of the database fields.
"""
if gdal.HAS_GDAL:
return gdal.SpatialReference(wkt).units
else:
m = cls.units_regex.match(wkt)
return m.group('unit'), m.group('unit_name')
@classmethod
def get_spheroid(cls, wkt, string=True):
"""
Class method used by GeometryField on initialization to
retrieve the `SPHEROID[..]` parameters from the given WKT.
"""
if gdal.HAS_GDAL:
srs = gdal.SpatialReference(wkt)
sphere_params = srs.ellipsoid
sphere_name = srs['spheroid']
else:
m = cls.spheroid_regex.match(wkt)
if m:
sphere_params = (float(m.group('major')), float(m.group('flattening')))
sphere_name = m.group('name')
else:
return None
if not string:
return sphere_name, sphere_params
else:
# `string` parameter used to place in format acceptable by PostGIS
if len(sphere_params) == 3:
radius, flattening = sphere_params[0], sphere_params[2]
else:
radius, flattening = sphere_params
return 'SPHEROID["%s",%s,%s]' % (sphere_name, radius, flattening)
def __str__(self):
"""
Returns the string representation. If GDAL is installed,
it will be 'pretty' OGC WKT.
"""
try:
return six.text_type(self.srs)
except Exception:
return six.text_type(self.wkt) | self._srs = gdal.SpatialReference(self.proj4text)
return self.srs
except Exception as msg:
pass | random_line_split |
base.py | """
Base/mixin classes for the spatial backend database operations and the
`SpatialRefSys` model the backend.
"""
import re
from django.contrib.gis import gdal
from django.utils import six
from django.utils.encoding import python_2_unicode_compatible
class BaseSpatialOperations(object):
"""
This module holds the base `BaseSpatialBackend` object, which is
instantiated by each spatial database backend with the features
it has.
"""
distance_functions = {}
geometry_functions = {}
geometry_operators = {}
geography_operators = {}
geography_functions = {}
gis_terms = set()
truncate_params = {}
# Quick booleans for the type of this spatial backend, and
# an attribute for the spatial database version tuple (if applicable)
postgis = False
spatialite = False
mysql = False
oracle = False
spatial_version = None
# How the geometry column should be selected.
select = None
# Does the spatial database have a geometry or geography type?
geography = False
geometry = False
area = False
centroid = False
difference = False
distance = False
distance_sphere = False
distance_spheroid = False
envelope = False
force_rhr = False
mem_size = False
bounding_circle = False
num_geom = False
num_points = False
perimeter = False
perimeter3d = False
point_on_surface = False
polygonize = False
reverse = False
scale = False
snap_to_grid = False
sym_difference = False
transform = False
translate = False
union = False
# Aggregates
collect = False
extent = False
extent3d = False
make_line = False
unionagg = False
# Serialization
geohash = False
geojson = False
gml = False
kml = False
svg = False
# Constructors
from_text = False
from_wkb = False
# Default conversion functions for aggregates; will be overridden if implemented
# for the spatial backend.
def convert_extent(self, box):
raise NotImplementedError('Aggregate extent not implemented for this spatial backend.')
def convert_extent3d(self, box):
raise NotImplementedError('Aggregate 3D extent not implemented for this spatial backend.')
def convert_geom(self, geom_val, geom_field):
raise NotImplementedError('Aggregate method not implemented for this spatial backend.')
# For quoting column values, rather than columns.
def geo_quote_name(self, name):
return "'%s'" % name
# GeometryField operations
def geo_db_type(self, f):
"""
Returns the database column type for the geometry field on
the spatial backend.
"""
raise NotImplementedError('subclasses of BaseSpatialOperations must provide a geo_db_type() method')
def get_distance(self, f, value, lookup_type):
"""
Returns the distance parameters for the given geometry field,
lookup value, and lookup type.
"""
raise NotImplementedError('Distance operations not available on this spatial backend.')
def get_geom_placeholder(self, f, value):
"""
Returns the placeholder for the given geometry field with the given
value. Depending on the spatial backend, the placeholder may contain a
stored procedure call to the transformation function of the spatial
backend.
"""
raise NotImplementedError('subclasses of BaseSpatialOperations must provide a geo_db_placeholder() method')
def get_expression_column(self, evaluator):
"""
Helper method to return the quoted column string from the evaluator
for its expression.
"""
for expr, col_tup in evaluator.cols:
if expr is evaluator.expression:
return '%s.%s' % tuple(map(self.quote_name, col_tup))
raise Exception("Could not find the column for the expression.")
# Spatial SQL Construction
def spatial_aggregate_sql(self, agg):
raise NotImplementedError('Aggregate support not implemented for this spatial backend.')
def spatial_lookup_sql(self, lvalue, lookup_type, value, field):
raise NotImplementedError('subclasses of BaseSpatialOperations must a provide spatial_lookup_sql() method')
# Routines for getting the OGC-compliant models.
def geometry_columns(self):
|
def spatial_ref_sys(self):
raise NotImplementedError('subclasses of BaseSpatialOperations must a provide spatial_ref_sys() method')
@python_2_unicode_compatible
class SpatialRefSysMixin(object):
"""
The SpatialRefSysMixin is a class used by the database-dependent
SpatialRefSys objects to reduce redundnant code.
"""
# For pulling out the spheroid from the spatial reference string. This
# regular expression is used only if the user does not have GDAL installed.
# TODO: Flattening not used in all ellipsoids, could also be a minor axis,
# or 'b' parameter.
spheroid_regex = re.compile(r'.+SPHEROID\[\"(?P<name>.+)\",(?P<major>\d+(\.\d+)?),(?P<flattening>\d{3}\.\d+),')
# For pulling out the units on platforms w/o GDAL installed.
# TODO: Figure out how to pull out angular units of projected coordinate system and
# fix for LOCAL_CS types. GDAL should be highly recommended for performing
# distance queries.
units_regex = re.compile(r'.+UNIT ?\["(?P<unit_name>[\w \'\(\)]+)", ?(?P<unit>[\d\.]+)(,AUTHORITY\["(?P<unit_auth_name>[\w \'\(\)]+)","(?P<unit_auth_val>\d+)"\])?\]([\w ]+)?(,AUTHORITY\["(?P<auth_name>[\w \'\(\)]+)","(?P<auth_val>\d+)"\])?\]$')
@property
def srs(self):
"""
Returns a GDAL SpatialReference object, if GDAL is installed.
"""
if gdal.HAS_GDAL:
# TODO: Is caching really necessary here? Is complexity worth it?
if hasattr(self, '_srs'):
# Returning a clone of the cached SpatialReference object.
return self._srs.clone()
else:
# Attempting to cache a SpatialReference object.
# Trying to get from WKT first.
try:
self._srs = gdal.SpatialReference(self.wkt)
return self.srs
except Exception as msg:
pass
try:
self._srs = gdal.SpatialReference(self.proj4text)
return self.srs
except Exception as msg:
pass
raise Exception('Could not get OSR SpatialReference from WKT: %s\nError:\n%s' % (self.wkt, msg))
else:
raise Exception('GDAL is not installed.')
@property
def ellipsoid(self):
"""
Returns a tuple of the ellipsoid parameters:
(semimajor axis, semiminor axis, and inverse flattening).
"""
if gdal.HAS_GDAL:
return self.srs.ellipsoid
else:
m = self.spheroid_regex.match(self.wkt)
if m:
return (float(m.group('major')), float(m.group('flattening')))
else:
return None
@property
def name(self):
"Returns the projection name."
return self.srs.name
@property
def spheroid(self):
"Returns the spheroid name for this spatial reference."
return self.srs['spheroid']
@property
def datum(self):
"Returns the datum for this spatial reference."
return self.srs['datum']
@property
def projected(self):
"Is this Spatial Reference projected?"
if gdal.HAS_GDAL:
return self.srs.projected
else:
return self.wkt.startswith('PROJCS')
@property
def local(self):
"Is this Spatial Reference local?"
if gdal.HAS_GDAL:
return self.srs.local
else:
return self.wkt.startswith('LOCAL_CS')
@property
def geographic(self):
"Is this Spatial Reference geographic?"
if gdal.HAS_GDAL:
return self.srs.geographic
else:
return self.wkt.startswith('GEOGCS')
@property
def linear_name(self):
"Returns the linear units name."
if gdal.HAS_GDAL:
return self.srs.linear_name
elif self.geographic:
return None
else:
m = self.units_regex.match(self.wkt)
return m.group('unit_name')
@property
def linear_units(self):
"Returns the linear units."
if gdal.HAS_GDAL:
return self.srs.linear_units
elif self.geographic:
return None
else:
m = self.units_regex.match(self.wkt)
return m.group('unit')
@property
def angular_name(self):
"Returns the name of the angular units."
if gdal.HAS_GDAL:
return self.srs.angular_name
elif self.projected:
return None
else:
m = self.units_regex.match(self.wkt)
return m.group('unit_name')
@property
def angular_units(self):
"Returns the angular units."
if gdal.HAS_GDAL:
return self.srs.angular_units
elif self.projected:
return None
else:
m = self.units_regex.match(self.wkt)
return m.group('unit')
@property
def units(self):
"Returns a tuple of the units and the name."
if self.projected or self.local:
return (self.linear_units, self.linear_name)
elif self.geographic:
return (self.angular_units, self.angular_name)
else:
return (None, None)
@classmethod
def get_units(cls, wkt):
"""
Class method used by GeometryField on initialization to
retrive the units on the given WKT, without having to use
any of the database fields.
"""
if gdal.HAS_GDAL:
return gdal.SpatialReference(wkt).units
else:
m = cls.units_regex.match(wkt)
return m.group('unit'), m.group('unit_name')
@classmethod
def get_spheroid(cls, wkt, string=True):
"""
Class method used by GeometryField on initialization to
retrieve the `SPHEROID[..]` parameters from the given WKT.
"""
if gdal.HAS_GDAL:
srs = gdal.SpatialReference(wkt)
sphere_params = srs.ellipsoid
sphere_name = srs['spheroid']
else:
m = cls.spheroid_regex.match(wkt)
if m:
sphere_params = (float(m.group('major')), float(m.group('flattening')))
sphere_name = m.group('name')
else:
return None
if not string:
return sphere_name, sphere_params
else:
# `string` parameter used to place in format acceptable by PostGIS
if len(sphere_params) == 3:
radius, flattening = sphere_params[0], sphere_params[2]
else:
radius, flattening = sphere_params
return 'SPHEROID["%s",%s,%s]' % (sphere_name, radius, flattening)
def __str__(self):
"""
Returns the string representation. If GDAL is installed,
it will be 'pretty' OGC WKT.
"""
try:
return six.text_type(self.srs)
except Exception:
return six.text_type(self.wkt)
| raise NotImplementedError('subclasses of BaseSpatialOperations must a provide geometry_columns() method') | identifier_body |
base.py | """
Base/mixin classes for the spatial backend database operations and the
`SpatialRefSys` model the backend.
"""
import re
from django.contrib.gis import gdal
from django.utils import six
from django.utils.encoding import python_2_unicode_compatible
class BaseSpatialOperations(object):
"""
This module holds the base `BaseSpatialBackend` object, which is
instantiated by each spatial database backend with the features
it has.
"""
distance_functions = {}
geometry_functions = {}
geometry_operators = {}
geography_operators = {}
geography_functions = {}
gis_terms = set()
truncate_params = {}
# Quick booleans for the type of this spatial backend, and
# an attribute for the spatial database version tuple (if applicable)
postgis = False
spatialite = False
mysql = False
oracle = False
spatial_version = None
# How the geometry column should be selected.
select = None
# Does the spatial database have a geometry or geography type?
geography = False
geometry = False
area = False
centroid = False
difference = False
distance = False
distance_sphere = False
distance_spheroid = False
envelope = False
force_rhr = False
mem_size = False
bounding_circle = False
num_geom = False
num_points = False
perimeter = False
perimeter3d = False
point_on_surface = False
polygonize = False
reverse = False
scale = False
snap_to_grid = False
sym_difference = False
transform = False
translate = False
union = False
# Aggregates
collect = False
extent = False
extent3d = False
make_line = False
unionagg = False
# Serialization
geohash = False
geojson = False
gml = False
kml = False
svg = False
# Constructors
from_text = False
from_wkb = False
# Default conversion functions for aggregates; will be overridden if implemented
# for the spatial backend.
def convert_extent(self, box):
raise NotImplementedError('Aggregate extent not implemented for this spatial backend.')
def convert_extent3d(self, box):
raise NotImplementedError('Aggregate 3D extent not implemented for this spatial backend.')
def convert_geom(self, geom_val, geom_field):
raise NotImplementedError('Aggregate method not implemented for this spatial backend.')
# For quoting column values, rather than columns.
def geo_quote_name(self, name):
return "'%s'" % name
# GeometryField operations
def geo_db_type(self, f):
"""
Returns the database column type for the geometry field on
the spatial backend.
"""
raise NotImplementedError('subclasses of BaseSpatialOperations must provide a geo_db_type() method')
def get_distance(self, f, value, lookup_type):
"""
Returns the distance parameters for the given geometry field,
lookup value, and lookup type.
"""
raise NotImplementedError('Distance operations not available on this spatial backend.')
def get_geom_placeholder(self, f, value):
"""
Returns the placeholder for the given geometry field with the given
value. Depending on the spatial backend, the placeholder may contain a
stored procedure call to the transformation function of the spatial
backend.
"""
raise NotImplementedError('subclasses of BaseSpatialOperations must provide a geo_db_placeholder() method')
def get_expression_column(self, evaluator):
"""
Helper method to return the quoted column string from the evaluator
for its expression.
"""
for expr, col_tup in evaluator.cols:
if expr is evaluator.expression:
return '%s.%s' % tuple(map(self.quote_name, col_tup))
raise Exception("Could not find the column for the expression.")
# Spatial SQL Construction
def spatial_aggregate_sql(self, agg):
raise NotImplementedError('Aggregate support not implemented for this spatial backend.')
def spatial_lookup_sql(self, lvalue, lookup_type, value, field):
raise NotImplementedError('subclasses of BaseSpatialOperations must a provide spatial_lookup_sql() method')
# Routines for getting the OGC-compliant models.
def geometry_columns(self):
raise NotImplementedError('subclasses of BaseSpatialOperations must a provide geometry_columns() method')
def spatial_ref_sys(self):
raise NotImplementedError('subclasses of BaseSpatialOperations must a provide spatial_ref_sys() method')
@python_2_unicode_compatible
class SpatialRefSysMixin(object):
"""
The SpatialRefSysMixin is a class used by the database-dependent
SpatialRefSys objects to reduce redundnant code.
"""
# For pulling out the spheroid from the spatial reference string. This
# regular expression is used only if the user does not have GDAL installed.
# TODO: Flattening not used in all ellipsoids, could also be a minor axis,
# or 'b' parameter.
spheroid_regex = re.compile(r'.+SPHEROID\[\"(?P<name>.+)\",(?P<major>\d+(\.\d+)?),(?P<flattening>\d{3}\.\d+),')
# For pulling out the units on platforms w/o GDAL installed.
# TODO: Figure out how to pull out angular units of projected coordinate system and
# fix for LOCAL_CS types. GDAL should be highly recommended for performing
# distance queries.
units_regex = re.compile(r'.+UNIT ?\["(?P<unit_name>[\w \'\(\)]+)", ?(?P<unit>[\d\.]+)(,AUTHORITY\["(?P<unit_auth_name>[\w \'\(\)]+)","(?P<unit_auth_val>\d+)"\])?\]([\w ]+)?(,AUTHORITY\["(?P<auth_name>[\w \'\(\)]+)","(?P<auth_val>\d+)"\])?\]$')
@property
def srs(self):
"""
Returns a GDAL SpatialReference object, if GDAL is installed.
"""
if gdal.HAS_GDAL:
# TODO: Is caching really necessary here? Is complexity worth it?
if hasattr(self, '_srs'):
# Returning a clone of the cached SpatialReference object.
return self._srs.clone()
else:
# Attempting to cache a SpatialReference object.
# Trying to get from WKT first.
try:
self._srs = gdal.SpatialReference(self.wkt)
return self.srs
except Exception as msg:
pass
try:
self._srs = gdal.SpatialReference(self.proj4text)
return self.srs
except Exception as msg:
pass
raise Exception('Could not get OSR SpatialReference from WKT: %s\nError:\n%s' % (self.wkt, msg))
else:
raise Exception('GDAL is not installed.')
@property
def ellipsoid(self):
"""
Returns a tuple of the ellipsoid parameters:
(semimajor axis, semiminor axis, and inverse flattening).
"""
if gdal.HAS_GDAL:
return self.srs.ellipsoid
else:
m = self.spheroid_regex.match(self.wkt)
if m:
return (float(m.group('major')), float(m.group('flattening')))
else:
return None
@property
def name(self):
"Returns the projection name."
return self.srs.name
@property
def spheroid(self):
"Returns the spheroid name for this spatial reference."
return self.srs['spheroid']
@property
def datum(self):
"Returns the datum for this spatial reference."
return self.srs['datum']
@property
def projected(self):
"Is this Spatial Reference projected?"
if gdal.HAS_GDAL:
return self.srs.projected
else:
return self.wkt.startswith('PROJCS')
@property
def local(self):
"Is this Spatial Reference local?"
if gdal.HAS_GDAL:
return self.srs.local
else:
return self.wkt.startswith('LOCAL_CS')
@property
def geographic(self):
"Is this Spatial Reference geographic?"
if gdal.HAS_GDAL:
return self.srs.geographic
else:
return self.wkt.startswith('GEOGCS')
@property
def linear_name(self):
"Returns the linear units name."
if gdal.HAS_GDAL:
return self.srs.linear_name
elif self.geographic:
return None
else:
|
@property
def linear_units(self):
"Returns the linear units."
if gdal.HAS_GDAL:
return self.srs.linear_units
elif self.geographic:
return None
else:
m = self.units_regex.match(self.wkt)
return m.group('unit')
@property
def angular_name(self):
"Returns the name of the angular units."
if gdal.HAS_GDAL:
return self.srs.angular_name
elif self.projected:
return None
else:
m = self.units_regex.match(self.wkt)
return m.group('unit_name')
@property
def angular_units(self):
"Returns the angular units."
if gdal.HAS_GDAL:
return self.srs.angular_units
elif self.projected:
return None
else:
m = self.units_regex.match(self.wkt)
return m.group('unit')
@property
def units(self):
"Returns a tuple of the units and the name."
if self.projected or self.local:
return (self.linear_units, self.linear_name)
elif self.geographic:
return (self.angular_units, self.angular_name)
else:
return (None, None)
@classmethod
def get_units(cls, wkt):
"""
Class method used by GeometryField on initialization to
retrive the units on the given WKT, without having to use
any of the database fields.
"""
if gdal.HAS_GDAL:
return gdal.SpatialReference(wkt).units
else:
m = cls.units_regex.match(wkt)
return m.group('unit'), m.group('unit_name')
@classmethod
def get_spheroid(cls, wkt, string=True):
"""
Class method used by GeometryField on initialization to
retrieve the `SPHEROID[..]` parameters from the given WKT.
"""
if gdal.HAS_GDAL:
srs = gdal.SpatialReference(wkt)
sphere_params = srs.ellipsoid
sphere_name = srs['spheroid']
else:
m = cls.spheroid_regex.match(wkt)
if m:
sphere_params = (float(m.group('major')), float(m.group('flattening')))
sphere_name = m.group('name')
else:
return None
if not string:
return sphere_name, sphere_params
else:
# `string` parameter used to place in format acceptable by PostGIS
if len(sphere_params) == 3:
radius, flattening = sphere_params[0], sphere_params[2]
else:
radius, flattening = sphere_params
return 'SPHEROID["%s",%s,%s]' % (sphere_name, radius, flattening)
def __str__(self):
"""
Returns the string representation. If GDAL is installed,
it will be 'pretty' OGC WKT.
"""
try:
return six.text_type(self.srs)
except Exception:
return six.text_type(self.wkt)
| m = self.units_regex.match(self.wkt)
return m.group('unit_name') | conditional_block |
base.py | """
Base/mixin classes for the spatial backend database operations and the
`SpatialRefSys` model the backend.
"""
import re
from django.contrib.gis import gdal
from django.utils import six
from django.utils.encoding import python_2_unicode_compatible
class BaseSpatialOperations(object):
"""
This module holds the base `BaseSpatialBackend` object, which is
instantiated by each spatial database backend with the features
it has.
"""
distance_functions = {}
geometry_functions = {}
geometry_operators = {}
geography_operators = {}
geography_functions = {}
gis_terms = set()
truncate_params = {}
# Quick booleans for the type of this spatial backend, and
# an attribute for the spatial database version tuple (if applicable)
postgis = False
spatialite = False
mysql = False
oracle = False
spatial_version = None
# How the geometry column should be selected.
select = None
# Does the spatial database have a geometry or geography type?
geography = False
geometry = False
area = False
centroid = False
difference = False
distance = False
distance_sphere = False
distance_spheroid = False
envelope = False
force_rhr = False
mem_size = False
bounding_circle = False
num_geom = False
num_points = False
perimeter = False
perimeter3d = False
point_on_surface = False
polygonize = False
reverse = False
scale = False
snap_to_grid = False
sym_difference = False
transform = False
translate = False
union = False
# Aggregates
collect = False
extent = False
extent3d = False
make_line = False
unionagg = False
# Serialization
geohash = False
geojson = False
gml = False
kml = False
svg = False
# Constructors
from_text = False
from_wkb = False
# Default conversion functions for aggregates; will be overridden if implemented
# for the spatial backend.
def convert_extent(self, box):
raise NotImplementedError('Aggregate extent not implemented for this spatial backend.')
def convert_extent3d(self, box):
raise NotImplementedError('Aggregate 3D extent not implemented for this spatial backend.')
def convert_geom(self, geom_val, geom_field):
raise NotImplementedError('Aggregate method not implemented for this spatial backend.')
# For quoting column values, rather than columns.
def geo_quote_name(self, name):
return "'%s'" % name
# GeometryField operations
def geo_db_type(self, f):
"""
Returns the database column type for the geometry field on
the spatial backend.
"""
raise NotImplementedError('subclasses of BaseSpatialOperations must provide a geo_db_type() method')
def get_distance(self, f, value, lookup_type):
"""
Returns the distance parameters for the given geometry field,
lookup value, and lookup type.
"""
raise NotImplementedError('Distance operations not available on this spatial backend.')
def get_geom_placeholder(self, f, value):
"""
Returns the placeholder for the given geometry field with the given
value. Depending on the spatial backend, the placeholder may contain a
stored procedure call to the transformation function of the spatial
backend.
"""
raise NotImplementedError('subclasses of BaseSpatialOperations must provide a geo_db_placeholder() method')
def get_expression_column(self, evaluator):
"""
Helper method to return the quoted column string from the evaluator
for its expression.
"""
for expr, col_tup in evaluator.cols:
if expr is evaluator.expression:
return '%s.%s' % tuple(map(self.quote_name, col_tup))
raise Exception("Could not find the column for the expression.")
# Spatial SQL Construction
def spatial_aggregate_sql(self, agg):
raise NotImplementedError('Aggregate support not implemented for this spatial backend.')
def | (self, lvalue, lookup_type, value, field):
raise NotImplementedError('subclasses of BaseSpatialOperations must a provide spatial_lookup_sql() method')
# Routines for getting the OGC-compliant models.
def geometry_columns(self):
raise NotImplementedError('subclasses of BaseSpatialOperations must a provide geometry_columns() method')
def spatial_ref_sys(self):
raise NotImplementedError('subclasses of BaseSpatialOperations must a provide spatial_ref_sys() method')
@python_2_unicode_compatible
class SpatialRefSysMixin(object):
"""
The SpatialRefSysMixin is a class used by the database-dependent
SpatialRefSys objects to reduce redundnant code.
"""
# For pulling out the spheroid from the spatial reference string. This
# regular expression is used only if the user does not have GDAL installed.
# TODO: Flattening not used in all ellipsoids, could also be a minor axis,
# or 'b' parameter.
spheroid_regex = re.compile(r'.+SPHEROID\[\"(?P<name>.+)\",(?P<major>\d+(\.\d+)?),(?P<flattening>\d{3}\.\d+),')
# For pulling out the units on platforms w/o GDAL installed.
# TODO: Figure out how to pull out angular units of projected coordinate system and
# fix for LOCAL_CS types. GDAL should be highly recommended for performing
# distance queries.
units_regex = re.compile(r'.+UNIT ?\["(?P<unit_name>[\w \'\(\)]+)", ?(?P<unit>[\d\.]+)(,AUTHORITY\["(?P<unit_auth_name>[\w \'\(\)]+)","(?P<unit_auth_val>\d+)"\])?\]([\w ]+)?(,AUTHORITY\["(?P<auth_name>[\w \'\(\)]+)","(?P<auth_val>\d+)"\])?\]$')
@property
def srs(self):
"""
Returns a GDAL SpatialReference object, if GDAL is installed.
"""
if gdal.HAS_GDAL:
# TODO: Is caching really necessary here? Is complexity worth it?
if hasattr(self, '_srs'):
# Returning a clone of the cached SpatialReference object.
return self._srs.clone()
else:
# Attempting to cache a SpatialReference object.
# Trying to get from WKT first.
try:
self._srs = gdal.SpatialReference(self.wkt)
return self.srs
except Exception as msg:
pass
try:
self._srs = gdal.SpatialReference(self.proj4text)
return self.srs
except Exception as msg:
pass
raise Exception('Could not get OSR SpatialReference from WKT: %s\nError:\n%s' % (self.wkt, msg))
else:
raise Exception('GDAL is not installed.')
@property
def ellipsoid(self):
"""
Returns a tuple of the ellipsoid parameters:
(semimajor axis, semiminor axis, and inverse flattening).
"""
if gdal.HAS_GDAL:
return self.srs.ellipsoid
else:
m = self.spheroid_regex.match(self.wkt)
if m:
return (float(m.group('major')), float(m.group('flattening')))
else:
return None
@property
def name(self):
"Returns the projection name."
return self.srs.name
@property
def spheroid(self):
"Returns the spheroid name for this spatial reference."
return self.srs['spheroid']
@property
def datum(self):
"Returns the datum for this spatial reference."
return self.srs['datum']
@property
def projected(self):
"Is this Spatial Reference projected?"
if gdal.HAS_GDAL:
return self.srs.projected
else:
return self.wkt.startswith('PROJCS')
@property
def local(self):
"Is this Spatial Reference local?"
if gdal.HAS_GDAL:
return self.srs.local
else:
return self.wkt.startswith('LOCAL_CS')
@property
def geographic(self):
"Is this Spatial Reference geographic?"
if gdal.HAS_GDAL:
return self.srs.geographic
else:
return self.wkt.startswith('GEOGCS')
@property
def linear_name(self):
"Returns the linear units name."
if gdal.HAS_GDAL:
return self.srs.linear_name
elif self.geographic:
return None
else:
m = self.units_regex.match(self.wkt)
return m.group('unit_name')
@property
def linear_units(self):
"Returns the linear units."
if gdal.HAS_GDAL:
return self.srs.linear_units
elif self.geographic:
return None
else:
m = self.units_regex.match(self.wkt)
return m.group('unit')
@property
def angular_name(self):
"Returns the name of the angular units."
if gdal.HAS_GDAL:
return self.srs.angular_name
elif self.projected:
return None
else:
m = self.units_regex.match(self.wkt)
return m.group('unit_name')
@property
def angular_units(self):
"Returns the angular units."
if gdal.HAS_GDAL:
return self.srs.angular_units
elif self.projected:
return None
else:
m = self.units_regex.match(self.wkt)
return m.group('unit')
@property
def units(self):
"Returns a tuple of the units and the name."
if self.projected or self.local:
return (self.linear_units, self.linear_name)
elif self.geographic:
return (self.angular_units, self.angular_name)
else:
return (None, None)
@classmethod
def get_units(cls, wkt):
"""
Class method used by GeometryField on initialization to
retrive the units on the given WKT, without having to use
any of the database fields.
"""
if gdal.HAS_GDAL:
return gdal.SpatialReference(wkt).units
else:
m = cls.units_regex.match(wkt)
return m.group('unit'), m.group('unit_name')
@classmethod
def get_spheroid(cls, wkt, string=True):
"""
Class method used by GeometryField on initialization to
retrieve the `SPHEROID[..]` parameters from the given WKT.
"""
if gdal.HAS_GDAL:
srs = gdal.SpatialReference(wkt)
sphere_params = srs.ellipsoid
sphere_name = srs['spheroid']
else:
m = cls.spheroid_regex.match(wkt)
if m:
sphere_params = (float(m.group('major')), float(m.group('flattening')))
sphere_name = m.group('name')
else:
return None
if not string:
return sphere_name, sphere_params
else:
# `string` parameter used to place in format acceptable by PostGIS
if len(sphere_params) == 3:
radius, flattening = sphere_params[0], sphere_params[2]
else:
radius, flattening = sphere_params
return 'SPHEROID["%s",%s,%s]' % (sphere_name, radius, flattening)
def __str__(self):
"""
Returns the string representation. If GDAL is installed,
it will be 'pretty' OGC WKT.
"""
try:
return six.text_type(self.srs)
except Exception:
return six.text_type(self.wkt)
| spatial_lookup_sql | identifier_name |
TransitionExampleGroupExplorer.js | import React, { Component } from 'react'
import { Form, Grid, Image, Transition } from 'shengnian-ui-react'
const transitions = [
'scale',
'fade', 'fade up', 'fade down', 'fade left', 'fade right',
'horizontal flip', 'vertical flip',
'drop',
'fly left', 'fly right', 'fly up', 'fly down',
'swing left', 'swing right', 'swing up', 'swing down',
'browse', 'browse right',
'slide down', 'slide up', 'slide right',
]
const options = transitions.map(name => ({ key: name, text: name, value: name }))
export default class TransitionExampleSingleExplorer extends Component {
state = { animation: transitions[0], duration: 500, visible: true }
handleChange = (e, { name, value }) => this.setState({ [name]: value })
handleVisibility = () => this.setState({ visible: !this.state.visible })
render() {
const { animation, duration, visible } = this.state
return (
<Grid columns={2}>
<Grid.Column as={Form}>
<Form.Select
label='Choose transition'
name='animation'
onChange={this.handleChange}
options={options}
value={animation}
/>
<Form.Input
label={`Duration: ${duration}ms `}
min={100}
max={2000}
name='duration'
onChange={this.handleChange} | <Form.Button content={visible ? 'Unmount' : 'Mount'} onClick={this.handleVisibility} />
</Grid.Column>
<Grid.Column>
<Transition.Group animation={animation} duration={duration}>
{visible && <Image centered size='small' src='/assets/images/leaves/4.png' />}
</Transition.Group>
</Grid.Column>
</Grid>
)
}
} | step={100}
type='range'
value={duration}
/> | random_line_split |
TransitionExampleGroupExplorer.js | import React, { Component } from 'react'
import { Form, Grid, Image, Transition } from 'shengnian-ui-react'
const transitions = [
'scale',
'fade', 'fade up', 'fade down', 'fade left', 'fade right',
'horizontal flip', 'vertical flip',
'drop',
'fly left', 'fly right', 'fly up', 'fly down',
'swing left', 'swing right', 'swing up', 'swing down',
'browse', 'browse right',
'slide down', 'slide up', 'slide right',
]
const options = transitions.map(name => ({ key: name, text: name, value: name }))
export default class | extends Component {
state = { animation: transitions[0], duration: 500, visible: true }
handleChange = (e, { name, value }) => this.setState({ [name]: value })
handleVisibility = () => this.setState({ visible: !this.state.visible })
render() {
const { animation, duration, visible } = this.state
return (
<Grid columns={2}>
<Grid.Column as={Form}>
<Form.Select
label='Choose transition'
name='animation'
onChange={this.handleChange}
options={options}
value={animation}
/>
<Form.Input
label={`Duration: ${duration}ms `}
min={100}
max={2000}
name='duration'
onChange={this.handleChange}
step={100}
type='range'
value={duration}
/>
<Form.Button content={visible ? 'Unmount' : 'Mount'} onClick={this.handleVisibility} />
</Grid.Column>
<Grid.Column>
<Transition.Group animation={animation} duration={duration}>
{visible && <Image centered size='small' src='/assets/images/leaves/4.png' />}
</Transition.Group>
</Grid.Column>
</Grid>
)
}
}
| TransitionExampleSingleExplorer | identifier_name |
keymap.rs | //! Keymap support
use std;
use std::ptr;
use libc::c_void;
use remacs_macros::lisp_fn;
use crate::{
buffers::current_buffer,
data::{aref, fset, indirect_function, set},
eval::{autoload_do_load, unbind_to},
indent::indent_to,
keyboard,
keyboard::lucid_event_type_list_p,
lisp::LispObject,
lists::{nth, setcdr},
lists::{LispCons, LispConsCircularChecks, LispConsEndChecks},
obarray::intern,
remacs_sys::{
access_keymap, copy_keymap_item, describe_vector, make_save_funcptr_ptr_obj,
map_char_table, map_keymap_call, map_keymap_char_table_item, map_keymap_function_t,
map_keymap_item, maybe_quit, specbind,
},
remacs_sys::{char_bits, current_global_map as _current_global_map, globals, EmacsInt},
remacs_sys::{
Fcommand_remapping, Fcopy_sequence, Fcurrent_active_maps, Fevent_convert_list,
Fmake_char_table, Fpurecopy, Fset_char_table_range, Fterpri,
},
remacs_sys::{
Qautoload, Qkeymap, Qkeymapp, Qmouse_click, Qnil, Qstandard_output, Qt,
Qvector_or_char_table_p,
},
symbols::LispSymbolRef,
threads::{c_specpdl_index, ThreadState},
};
pub fn Ctl(c: char) -> i32 {
(c as i32) & 0x1f
}
// Hash table used to cache a reverse-map to speed up calls to where-is.
declare_GC_protected_static!(where_is_cache, Qnil);
/// Allows the C code to get the value of `where_is_cache`
#[no_mangle]
pub extern "C" fn get_where_is_cache() -> LispObject {
unsafe { where_is_cache }
}
/// Allows the C code to set the value of `where_is_cache`
#[no_mangle]
pub extern "C" fn set_where_is_cache(val: LispObject) {
unsafe {
where_is_cache = val;
}
}
// Which keymaps are reverse-stored in the cache.
declare_GC_protected_static!(where_is_cache_keymaps, Qt);
/// Allows the C code to get the value of `where_is_cache_keymaps`
#[no_mangle]
pub extern "C" fn get_where_is_cache_keymaps() -> LispObject {
unsafe { where_is_cache_keymaps }
}
/// Allows the C code to set the value of `where_is_cache_keymaps`
#[no_mangle]
pub extern "C" fn set_where_is_cache_keymaps(val: LispObject) {
unsafe {
where_is_cache_keymaps = val;
}
}
/// Check that OBJECT is a keymap (after dereferencing through any
/// symbols). If it is, return it.
///
/// If AUTOLOAD and if OBJECT is a symbol whose function value
/// is an autoload form, do the autoload and try again.
/// If AUTOLOAD, callers must assume GC is possible.
///
/// `ERROR_IF_NOT_KEYMAP` controls how we respond if OBJECT isn't a keymap.
/// If `ERROR_IF_NOT_KEYMAP`, signal an error; otherwise,
/// just return Qnil.
///
/// Note that most of the time, we don't want to pursue autoloads.
/// Functions like `Faccessible_keymaps` which scan entire keymap trees
/// shouldn't load every autoloaded keymap. I'm not sure about this,
/// but it seems to me that only `read_key_sequence`, `Flookup_key`, and
/// `Fdefine_key` should cause keymaps to be autoloaded.
///
/// This function can GC when AUTOLOAD is true, because it calls
/// `Fautoload_do_load` which can GC.
#[no_mangle]
pub extern "C" fn get_keymap(
object: LispObject,
error_if_not_keymap: bool,
autoload: bool,
) -> LispObject {
let object = object;
let mut autoload_retry = true;
while autoload_retry {
autoload_retry = false;
if object.is_nil() {
break;
}
if let Some((car, _)) = object.into() {
if car.eq(Qkeymap) {
return object;
}
}
let tem = indirect_function(object);
if let Some((car, _)) = tem.into() {
if car.eq(Qkeymap) {
return tem;
}
// Should we do an autoload? Autoload forms for keymaps have
// Qkeymap as their fifth element.
if (autoload || !error_if_not_keymap) && car.eq(Qautoload) && object.is_symbol() {
let tail = nth(4, tem);
if tail.eq(Qkeymap) {
if autoload {
autoload_do_load(tem, object, Qnil);
autoload_retry = true;
} else {
return object;
}
}
}
}
}
if error_if_not_keymap {
wrong_type!(Qkeymapp, object);
}
Qnil
}
/// Construct and return a new keymap, of the form (keymap CHARTABLE . ALIST).
/// CHARTABLE is a char-table that holds the bindings for all characters
/// without modifiers. All entries in it are initially nil, meaning
/// "command undefined". ALIST is an assoc-list which holds bindings for
/// function keys, mouse events, and any other things that appear in the
/// input stream. Initially, ALIST is nil.
///
/// The optional arg STRING supplies a menu name for the keymap
/// in case you use it as a menu with `x-popup-menu'.
#[lisp_fn(min = "0")]
pub fn make_keymap(string: LispObject) -> (LispObject, (LispObject, LispObject)) {
let tail: LispObject = if string.is_not_nil() {
list!(string)
} else {
Qnil
};
let char_table = unsafe { Fmake_char_table(Qkeymap, Qnil) };
(Qkeymap, (char_table, tail))
}
/// Return t if OBJECT is a keymap.
///
/// A keymap is a list (keymap . ALIST),
/// or a symbol whose function definition is itself a keymap.
/// ALIST elements look like (CHAR . DEFN) or (SYMBOL . DEFN);
/// a vector of densely packed bindings for small character codes
/// is also allowed as an element.
#[lisp_fn]
pub fn keymapp(object: LispObject) -> bool {
let map = get_keymap(object, false, false);
map.is_not_nil()
}
/// Return the parent map of KEYMAP, or nil if it has none.
/// We assume that KEYMAP is a valid keymap.
#[no_mangle]
pub extern "C" fn keymap_parent(keymap: LispObject, autoload: bool) -> LispObject |
/// Return the parent keymap of KEYMAP.
/// If KEYMAP has no parent, return nil.
#[lisp_fn(name = "keymap-parent", c_name = "keymap_parent")]
pub fn keymap_parent_lisp(keymap: LispObject) -> LispObject {
keymap_parent(keymap, true)
}
/// Check whether MAP is one of MAPS parents.
#[no_mangle]
pub extern "C" fn keymap_memberp(map: LispObject, maps: LispObject) -> bool {
let map = map;
let mut maps = maps;
if map.is_nil() {
return false;
}
while keymapp(maps) && !map.eq(maps) {
maps = keymap_parent(maps, false);
}
map.eq(maps)
}
/// Modify KEYMAP to set its parent map to PARENT.
/// Return PARENT. PARENT should be nil or another keymap.
#[lisp_fn]
pub fn set_keymap_parent(keymap: LispObject, parent: LispObject) -> LispObject {
// Flush any reverse-map cache
unsafe {
where_is_cache = Qnil;
where_is_cache_keymaps = Qt;
}
let mut parent = parent;
let keymap = get_keymap(keymap, true, true);
if parent.is_not_nil() {
parent = get_keymap(parent, true, false);
// Check for cycles
if keymap_memberp(keymap, parent) {
error!("Cyclic keymap inheritance");
}
}
// Skip past the initial element 'keymap'.
let mut prev = LispCons::from(keymap);
let mut list;
loop {
list = prev.cdr();
// If there is a parent keymap here, replace it.
// If we came to the end, add the parent in PREV.
match list.as_cons() {
None => break,
Some(cons) => {
if keymapp(list) {
break;
} else {
prev = cons;
}
}
}
}
prev.check_impure();
prev.set_cdr(parent);
parent
}
/// Return the prompt-string of a keymap MAP.
/// If non-nil, the prompt is shown in the echo-area
/// when reading a key-sequence to be looked-up in this keymap.
#[lisp_fn]
pub fn keymap_prompt(map: LispObject) -> LispObject {
let map = get_keymap(map, false, false);
for elt in map.iter_cars(LispConsEndChecks::off, LispConsCircularChecks::off) {
let mut tem = elt;
if tem.is_string() {
return tem;
} else if keymapp(tem) {
tem = keymap_prompt(tem);
if tem.is_not_nil() {
return tem;
}
}
}
Qnil
}
/// Same as `map_keymap_internal`, but traverses parent keymaps as well.
/// AUTOLOAD indicates that autoloaded keymaps should be loaded.
#[no_mangle]
pub unsafe extern "C" fn map_keymap(
map: LispObject,
fun: map_keymap_function_t,
args: LispObject,
data: *mut c_void,
autoload: bool,
) {
let mut map = get_keymap(map, true, autoload);
while map.is_cons() {
if let Some((car, cdr)) = map.into() {
if keymapp(car) {
map_keymap(car, fun, args, data, autoload);
map = cdr;
} else {
map = map_keymap_internal(map, fun, args, data);
}
}
if !map.is_cons() {
map = get_keymap(map, false, autoload);
}
}
}
/// Call FUNCTION once for each event binding in KEYMAP.
/// FUNCTION is called with two arguments: the event that is bound, and
/// the definition it is bound to. The event may be a character range.
///
/// If KEYMAP has a parent, the parent's bindings are included as well.
/// This works recursively: if the parent has itself a parent, then the
/// grandparent's bindings are also included and so on.
/// usage: (map-keymap FUNCTION KEYMAP)
#[lisp_fn(name = "map-keymap", c_name = "map_keymap", min = "2")]
pub fn map_keymap_lisp(function: LispObject, keymap: LispObject, sort_first: bool) -> LispObject {
if sort_first {
return call!(intern("map-keymap-sorted").into(), function, keymap);
}
unsafe {
map_keymap(
keymap,
Some(map_keymap_call),
function,
ptr::null_mut(),
true,
)
};
Qnil
}
/// Call FUN for every binding in MAP and stop at (and return) the parent.
/// FUN is called with 4 arguments: FUN (KEY, BINDING, ARGS, DATA).
#[no_mangle]
pub unsafe extern "C" fn map_keymap_internal(
map: LispObject,
fun: map_keymap_function_t,
args: LispObject,
data: *mut c_void,
) -> LispObject {
let map = map;
let tail = match map.into() {
None => Qnil,
Some((car, cdr)) => {
if car.eq(Qkeymap) {
cdr
} else {
map
}
}
};
let mut parent = tail;
for tail_cons in tail.iter_tails(LispConsEndChecks::off, LispConsCircularChecks::off) {
let binding = tail_cons.car();
if binding.eq(Qkeymap) {
break;
} else {
// An embedded parent.
if keymapp(binding) {
break;
}
if let Some((car, cdr)) = binding.into() {
map_keymap_item(fun, args, car, cdr, data);
} else if binding.is_vector() {
if let Some(binding_vec) = binding.as_vectorlike() {
for c in 0..binding_vec.pseudovector_size() {
map_keymap_item(fun, args, c.into(), aref(binding, c), data);
}
}
} else if binding.is_char_table() {
let saved = match fun {
Some(f) => make_save_funcptr_ptr_obj(Some(std::mem::transmute(f)), data, args),
None => make_save_funcptr_ptr_obj(None, data, args),
};
map_char_table(Some(map_keymap_char_table_item), Qnil, binding, saved);
}
}
parent = tail_cons.cdr();
}
parent
}
/// Call FUNCTION once for each event binding in KEYMAP.
/// FUNCTION is called with two arguments: the event that is bound, and
/// the definition it is bound to. The event may be a character range.
/// If KEYMAP has a parent, this function returns it without processing it.
#[lisp_fn(name = "map-keymap-internal", c_name = "map_keymap_internal")]
pub fn map_keymap_internal_lisp(function: LispObject, mut keymap: LispObject) -> LispObject {
keymap = get_keymap(keymap, true, true);
unsafe { map_keymap_internal(keymap, Some(map_keymap_call), function, ptr::null_mut()) }
}
/// Return the binding for command KEYS in current local keymap only.
/// KEYS is a string or vector, a sequence of keystrokes.
/// The binding is probably a symbol with a function definition.
/// If optional argument ACCEPT-DEFAULT is non-nil, recognize default
/// bindings; see the description of `lookup-key' for more details about this.
#[lisp_fn(min = "1")]
pub fn local_key_binding(keys: LispObject, accept_default: LispObject) -> LispObject {
let map = current_local_map();
if map.is_nil() {
Qnil
} else {
lookup_key(map, keys, accept_default)
}
}
/// Return current buffer's local keymap, or nil if it has none.
/// Normally the local keymap is set by the major mode with `use-local-map'.
#[lisp_fn]
pub fn current_local_map() -> LispObject {
ThreadState::current_buffer_unchecked().keymap_
}
/// Select KEYMAP as the local keymap.
/// If KEYMAP is nil, that means no local keymap.
#[lisp_fn]
pub fn use_local_map(mut keymap: LispObject) {
if !keymap.is_nil() {
let map = get_keymap(keymap, true, true);
keymap = map;
}
ThreadState::current_buffer_unchecked().keymap_ = keymap;
}
/// Return the binding for command KEYS in current global keymap only.
/// KEYS is a string or vector, a sequence of keystrokes.
/// The binding is probably a symbol with a function definition.
/// This function's return values are the same as those of `lookup-key'
/// (which see).
///
/// If optional argument ACCEPT-DEFAULT is non-nil, recognize default
/// bindings; see the description of `lookup-key' for more details about this.
#[lisp_fn(min = "1")]
pub fn global_key_binding(keys: LispObject, accept_default: LispObject) -> LispObject {
let map = current_global_map();
if map.is_nil() {
Qnil
} else {
lookup_key(map, keys, accept_default)
}
}
/// Return the current global keymap.
#[lisp_fn]
pub fn current_global_map() -> LispObject {
unsafe { _current_global_map }
}
/// Select KEYMAP as the global keymap.
#[lisp_fn]
pub fn use_global_map(keymap: LispObject) {
unsafe { _current_global_map = get_keymap(keymap, true, true) };
}
// Value is number if KEY is too long; nil if valid but has no definition.
// GC is possible in this function.
/// In keymap KEYMAP, look up key sequence KEY. Return the definition.
/// A value of nil means undefined. See doc of `define-key'
/// for kinds of definitions.
///
/// A number as value means KEY is "too long";
/// that is, characters or symbols in it except for the last one
/// fail to be a valid sequence of prefix characters in KEYMAP.
/// The number is how many characters at the front of KEY
/// it takes to reach a non-prefix key.
///
/// Normally, `lookup-key' ignores bindings for t, which act as default
/// bindings, used when nothing else in the keymap applies; this makes it
/// usable as a general function for probing keymaps. However, if the
/// third optional argument ACCEPT-DEFAULT is non-nil, `lookup-key' will
/// recognize the default bindings, just as `read-key-sequence' does.
#[lisp_fn(min = "2")]
pub fn lookup_key(keymap: LispObject, key: LispObject, accept_default: LispObject) -> LispObject {
let ok = accept_default.is_not_nil();
let mut keymap = get_keymap(keymap, true, true);
let length = key.as_vector_or_string_length() as EmacsInt;
if length == 0 {
return keymap;
}
let mut idx = 0;
loop {
let mut c = aref(key, idx);
idx += 1;
if c.is_cons() && lucid_event_type_list_p(c.into()) {
c = unsafe { Fevent_convert_list(c) };
}
// Turn the 8th bit of string chars into a meta modifier.
if let Some(k) = key.as_string() {
if let Some(x) = c.as_fixnum() {
let x = x as u32;
if x & 0x80 != 0 && !k.is_multibyte() {
c = ((x | char_bits::CHAR_META) & !0x80).into();
}
}
}
// Allow string since binding for `menu-bar-select-buffer'
// includes the buffer name in the key sequence.
if !(c.is_fixnum() || c.is_symbol() || c.is_cons() || c.is_string()) {
message_with_string!("Key sequence contains invalid event %s", c, true);
}
let cmd = unsafe { access_keymap(keymap, c, ok, false, true) };
if idx == length {
return cmd;
}
keymap = get_keymap(cmd, false, true);
if !keymap.is_cons() {
return idx.into();
}
unsafe {
maybe_quit();
};
}
}
/// Define COMMAND as a prefix command. COMMAND should be a symbol.
/// A new sparse keymap is stored as COMMAND's function definition and its
/// value.
/// This prepares COMMAND for use as a prefix key's binding.
/// If a second optional argument MAPVAR is given, it should be a symbol.
/// The map is then stored as MAPVAR's value instead of as COMMAND's
/// value; but COMMAND is still defined as a function.
/// The third optional argument NAME, if given, supplies a menu name
/// string for the map. This is required to use the keymap as a menu.
/// This function returns COMMAND.
#[lisp_fn(min = "1")]
pub fn define_prefix_command(
command: LispSymbolRef,
mapvar: LispObject,
name: LispObject,
) -> LispSymbolRef {
let map = make_sparse_keymap(name);
fset(command, map);
if mapvar.is_not_nil() {
set(mapvar.into(), map);
} else {
set(command, map);
}
command
}
/// Construct and return a new sparse keymap.
/// Its car is `keymap' and its cdr is an alist of (CHAR . DEFINITION),
/// which binds the character CHAR to DEFINITION, or (SYMBOL . DEFINITION),
/// which binds the function key or mouse event SYMBOL to DEFINITION.
/// Initially the alist is nil.
///
/// The optional arg STRING supplies a menu name for the keymap
/// in case you use it as a menu with `x-popup-menu'.
#[lisp_fn(min = "0")]
pub fn make_sparse_keymap(string: LispObject) -> LispObject {
if string.is_not_nil() {
let s = if unsafe { globals.Vpurify_flag }.is_not_nil() {
unsafe { Fpurecopy(string) }
} else {
string
};
list!(Qkeymap, s)
} else {
list!(Qkeymap)
}
}
#[no_mangle]
pub extern "C" fn describe_vector_princ(elt: LispObject, fun: LispObject) {
indent_to(16, 1.into());
call!(fun, elt);
unsafe { Fterpri(Qnil, Qnil) };
}
/// Insert a description of contents of VECTOR.
/// This is text showing the elements of vector matched against indices.
/// DESCRIBER is the output function used; nil means use `princ'.
#[lisp_fn(min = "1", name = "describe-vector", c_name = "describe_vector")]
pub fn describe_vector_lisp(vector: LispObject, mut describer: LispObject) {
if describer.is_nil() {
describer = intern("princ").into();
}
unsafe { specbind(Qstandard_output, current_buffer()) };
if !(vector.is_vector() || vector.is_char_table()) {
wrong_type!(Qvector_or_char_table_p, vector);
}
let count = c_specpdl_index();
unsafe {
describe_vector(
vector,
Qnil,
describer,
Some(describe_vector_princ),
false,
Qnil,
Qnil,
false,
false,
)
};
unbind_to(count, Qnil);
}
#[no_mangle]
pub extern "C" fn copy_keymap_1(chartable: LispObject, idx: LispObject, elt: LispObject) {
unsafe { Fset_char_table_range(chartable, idx, copy_keymap_item(elt)) };
}
/// Return a copy of the keymap KEYMAP.
///
/// Note that this is almost never needed. If you want a keymap that's like
/// another yet with a few changes, you should use map inheritance rather
/// than copying. I.e. something like:
///
/// (let ((map (make-sparse-keymap)))
/// (set-keymap-parent map <theirmap>)
/// (define-key map ...)
/// ...)
///
/// After performing `copy-keymap', the copy starts out with the same definitions
/// of KEYMAP, but changing either the copy or KEYMAP does not affect the other.
/// Any key definitions that are subkeymaps are recursively copied.
/// However, a key definition which is a symbol whose definition is a keymap
/// is not copied.
#[lisp_fn]
pub fn copy_keymap(keymap: LispObject) -> LispObject {
let keymap = get_keymap(keymap, true, false);
let mut tail = list!(Qkeymap);
let copy = tail;
let (_, mut keymap) = keymap.into(); // Skip the `keymap' symbol.
while let Some((mut elt, kmd)) = keymap.into() {
if elt.eq(Qkeymap) {
break;
}
if elt.is_char_table() {
elt = unsafe { Fcopy_sequence(elt) };
unsafe { map_char_table(Some(copy_keymap_1), Qnil, elt, elt) };
} else if let Some(v) = elt.as_vector() {
elt = unsafe { Fcopy_sequence(elt) };
let mut v2 = elt.as_vector().unwrap();
for (i, obj) in v.iter().enumerate() {
v2.set(i, unsafe { copy_keymap_item(obj) });
}
} else if let Some((front, back)) = elt.into() {
if front.eq(Qkeymap) {
// This is a sub keymap
elt = copy_keymap(elt);
} else {
elt = (front, unsafe { copy_keymap_item(back) }).into();
}
}
setcdr(tail.into(), list!(elt));
tail = LispCons::from(tail).cdr();
keymap = kmd;
}
setcdr(tail.into(), keymap);
copy
}
// GC is possible in this funtion if it autoloads a keymap.
/// Return the binding for command KEY in current keymaps.
/// KEY is a string or vector, a sequence of keystrokes.
/// The binding is probably a symbol with a function definition.
///
/// Normally, `key-binding' ignores bindings for t, which act as default
/// bindings, used when nothing else in the keymap applies; this makes it
/// usable as a general function for probing keymaps. However, if the
/// optional second argument ACCEPT-DEFAULT is non-nil, `key-binding' does
/// recognize the default bindings, just as `read-key-sequence' does.
///
/// Like the normal command loop, `key-binding' will remap the command
/// resulting from looking up KEY by looking up the command in the
/// current keymaps. However, if the optional third argument NO-REMAP
/// is non-nil, `key-binding' returns the unmapped command.
///
/// If KEY is a key sequence initiated with the mouse, the used keymaps
/// will depend on the clicked mouse position with regard to the buffer
/// and possible local keymaps on strings.
///
/// If the optional argument POSITION is non-nil, it specifies a mouse
/// position as returned by `event-start' and `event-end', and the lookup
/// occurs in the keymaps associated with it instead of KEY. It can also
/// be a number or marker, in which case the keymap properties at the
/// specified buffer position instead of point are used.
#[lisp_fn(min = "1")]
pub fn key_binding(
key: LispObject,
accept_default: bool,
no_remap: bool,
mut position: LispObject,
) -> LispObject {
if key.is_vector() && position.is_nil() {
let key = key.force_vector();
if key.len() == 0 {
return Qnil;
}
// Mouse events may have a symbolic prefix indicating the scrollbar or modeline
let idx = if key.get(0).is_symbol() && key.len() > 1 {
1
} else {
0
};
let event: keyboard::Event = key.get(idx).into();
// Ignore locations without event data
if event.has_data() {
let kind = event.head_kind();
if kind == Qmouse_click {
position = event.start()
}
}
}
let value = lookup_key(
(Qkeymap, unsafe { Fcurrent_active_maps(Qt, position) }).into(),
key,
accept_default.into(),
);
if value.is_nil() || value.is_integer() {
return Qnil;
}
// If the result of the ordinary keymap lookup is an interactive
// command, look for a key binding (i.e. remapping) for that command.
if !no_remap && value.is_symbol() {
let remap = unsafe { Fcommand_remapping(value, position, Qnil) };
if remap.is_not_nil() {
return remap;
}
}
value
}
include!(concat!(env!("OUT_DIR"), "/keymap_exports.rs"));
| {
let map = get_keymap(keymap, true, autoload);
let mut current = Qnil;
for elt in map.iter_tails(LispConsEndChecks::off, LispConsCircularChecks::off) {
current = elt.cdr();
if keymapp(current) {
return current;
}
}
get_keymap(current, false, autoload)
} | identifier_body |
keymap.rs | //! Keymap support
use std;
use std::ptr;
use libc::c_void;
use remacs_macros::lisp_fn;
use crate::{
buffers::current_buffer,
data::{aref, fset, indirect_function, set},
eval::{autoload_do_load, unbind_to},
indent::indent_to,
keyboard,
keyboard::lucid_event_type_list_p,
lisp::LispObject,
lists::{nth, setcdr},
lists::{LispCons, LispConsCircularChecks, LispConsEndChecks},
obarray::intern,
remacs_sys::{
access_keymap, copy_keymap_item, describe_vector, make_save_funcptr_ptr_obj,
map_char_table, map_keymap_call, map_keymap_char_table_item, map_keymap_function_t,
map_keymap_item, maybe_quit, specbind,
},
remacs_sys::{char_bits, current_global_map as _current_global_map, globals, EmacsInt},
remacs_sys::{
Fcommand_remapping, Fcopy_sequence, Fcurrent_active_maps, Fevent_convert_list,
Fmake_char_table, Fpurecopy, Fset_char_table_range, Fterpri,
},
remacs_sys::{
Qautoload, Qkeymap, Qkeymapp, Qmouse_click, Qnil, Qstandard_output, Qt,
Qvector_or_char_table_p,
},
symbols::LispSymbolRef,
threads::{c_specpdl_index, ThreadState},
};
pub fn Ctl(c: char) -> i32 {
(c as i32) & 0x1f
}
// Hash table used to cache a reverse-map to speed up calls to where-is.
declare_GC_protected_static!(where_is_cache, Qnil);
/// Allows the C code to get the value of `where_is_cache`
#[no_mangle]
pub extern "C" fn get_where_is_cache() -> LispObject {
unsafe { where_is_cache }
}
/// Allows the C code to set the value of `where_is_cache`
#[no_mangle]
pub extern "C" fn set_where_is_cache(val: LispObject) {
unsafe {
where_is_cache = val;
}
}
// Which keymaps are reverse-stored in the cache.
declare_GC_protected_static!(where_is_cache_keymaps, Qt);
/// Allows the C code to get the value of `where_is_cache_keymaps`
#[no_mangle]
pub extern "C" fn get_where_is_cache_keymaps() -> LispObject {
unsafe { where_is_cache_keymaps }
}
/// Allows the C code to set the value of `where_is_cache_keymaps`
#[no_mangle]
pub extern "C" fn set_where_is_cache_keymaps(val: LispObject) {
unsafe {
where_is_cache_keymaps = val;
}
}
/// Check that OBJECT is a keymap (after dereferencing through any
/// symbols). If it is, return it.
///
/// If AUTOLOAD and if OBJECT is a symbol whose function value
/// is an autoload form, do the autoload and try again.
/// If AUTOLOAD, callers must assume GC is possible.
///
/// `ERROR_IF_NOT_KEYMAP` controls how we respond if OBJECT isn't a keymap.
/// If `ERROR_IF_NOT_KEYMAP`, signal an error; otherwise,
/// just return Qnil.
///
/// Note that most of the time, we don't want to pursue autoloads.
/// Functions like `Faccessible_keymaps` which scan entire keymap trees
/// shouldn't load every autoloaded keymap. I'm not sure about this,
/// but it seems to me that only `read_key_sequence`, `Flookup_key`, and
/// `Fdefine_key` should cause keymaps to be autoloaded.
///
/// This function can GC when AUTOLOAD is true, because it calls
/// `Fautoload_do_load` which can GC.
#[no_mangle]
pub extern "C" fn get_keymap(
object: LispObject,
error_if_not_keymap: bool,
autoload: bool,
) -> LispObject {
let object = object;
let mut autoload_retry = true;
while autoload_retry {
autoload_retry = false;
if object.is_nil() {
break;
}
if let Some((car, _)) = object.into() {
if car.eq(Qkeymap) {
return object;
}
}
let tem = indirect_function(object);
if let Some((car, _)) = tem.into() {
if car.eq(Qkeymap) {
return tem;
}
// Should we do an autoload? Autoload forms for keymaps have
// Qkeymap as their fifth element.
if (autoload || !error_if_not_keymap) && car.eq(Qautoload) && object.is_symbol() {
let tail = nth(4, tem);
if tail.eq(Qkeymap) {
if autoload {
autoload_do_load(tem, object, Qnil);
autoload_retry = true;
} else {
return object;
}
}
}
}
}
if error_if_not_keymap {
wrong_type!(Qkeymapp, object);
}
Qnil
}
/// Construct and return a new keymap, of the form (keymap CHARTABLE . ALIST).
/// CHARTABLE is a char-table that holds the bindings for all characters
/// without modifiers. All entries in it are initially nil, meaning
/// "command undefined". ALIST is an assoc-list which holds bindings for
/// function keys, mouse events, and any other things that appear in the
/// input stream. Initially, ALIST is nil.
///
/// The optional arg STRING supplies a menu name for the keymap
/// in case you use it as a menu with `x-popup-menu'.
#[lisp_fn(min = "0")]
pub fn make_keymap(string: LispObject) -> (LispObject, (LispObject, LispObject)) {
let tail: LispObject = if string.is_not_nil() {
list!(string)
} else {
Qnil
};
let char_table = unsafe { Fmake_char_table(Qkeymap, Qnil) };
(Qkeymap, (char_table, tail))
}
/// Return t if OBJECT is a keymap.
///
/// A keymap is a list (keymap . ALIST),
/// or a symbol whose function definition is itself a keymap.
/// ALIST elements look like (CHAR . DEFN) or (SYMBOL . DEFN);
/// a vector of densely packed bindings for small character codes
/// is also allowed as an element.
#[lisp_fn]
pub fn keymapp(object: LispObject) -> bool {
let map = get_keymap(object, false, false);
map.is_not_nil()
}
/// Return the parent map of KEYMAP, or nil if it has none.
/// We assume that KEYMAP is a valid keymap.
#[no_mangle]
pub extern "C" fn keymap_parent(keymap: LispObject, autoload: bool) -> LispObject {
let map = get_keymap(keymap, true, autoload);
let mut current = Qnil;
for elt in map.iter_tails(LispConsEndChecks::off, LispConsCircularChecks::off) {
current = elt.cdr();
if keymapp(current) {
return current;
}
}
get_keymap(current, false, autoload)
}
/// Return the parent keymap of KEYMAP.
/// If KEYMAP has no parent, return nil.
#[lisp_fn(name = "keymap-parent", c_name = "keymap_parent")]
pub fn keymap_parent_lisp(keymap: LispObject) -> LispObject {
keymap_parent(keymap, true)
}
/// Check whether MAP is one of MAPS parents.
#[no_mangle]
pub extern "C" fn keymap_memberp(map: LispObject, maps: LispObject) -> bool {
let map = map;
let mut maps = maps;
if map.is_nil() {
return false;
}
while keymapp(maps) && !map.eq(maps) {
maps = keymap_parent(maps, false);
}
map.eq(maps)
}
/// Modify KEYMAP to set its parent map to PARENT.
/// Return PARENT. PARENT should be nil or another keymap.
#[lisp_fn]
pub fn set_keymap_parent(keymap: LispObject, parent: LispObject) -> LispObject {
// Flush any reverse-map cache
unsafe {
where_is_cache = Qnil;
where_is_cache_keymaps = Qt;
}
let mut parent = parent;
let keymap = get_keymap(keymap, true, true);
if parent.is_not_nil() {
parent = get_keymap(parent, true, false);
// Check for cycles
if keymap_memberp(keymap, parent) {
error!("Cyclic keymap inheritance");
}
}
// Skip past the initial element 'keymap'.
let mut prev = LispCons::from(keymap);
let mut list;
loop {
list = prev.cdr();
// If there is a parent keymap here, replace it.
// If we came to the end, add the parent in PREV.
match list.as_cons() {
None => break,
Some(cons) => {
if keymapp(list) {
break;
} else {
prev = cons;
}
}
}
}
prev.check_impure();
prev.set_cdr(parent);
parent
}
/// Return the prompt-string of a keymap MAP.
/// If non-nil, the prompt is shown in the echo-area
/// when reading a key-sequence to be looked-up in this keymap.
#[lisp_fn]
pub fn keymap_prompt(map: LispObject) -> LispObject {
let map = get_keymap(map, false, false);
for elt in map.iter_cars(LispConsEndChecks::off, LispConsCircularChecks::off) {
let mut tem = elt;
if tem.is_string() {
return tem;
} else if keymapp(tem) {
tem = keymap_prompt(tem);
if tem.is_not_nil() {
return tem;
}
}
}
Qnil
}
/// Same as `map_keymap_internal`, but traverses parent keymaps as well.
/// AUTOLOAD indicates that autoloaded keymaps should be loaded.
#[no_mangle]
pub unsafe extern "C" fn map_keymap(
map: LispObject,
fun: map_keymap_function_t,
args: LispObject,
data: *mut c_void,
autoload: bool,
) {
let mut map = get_keymap(map, true, autoload);
while map.is_cons() {
if let Some((car, cdr)) = map.into() {
if keymapp(car) {
map_keymap(car, fun, args, data, autoload);
map = cdr;
} else {
map = map_keymap_internal(map, fun, args, data);
}
}
if !map.is_cons() {
map = get_keymap(map, false, autoload);
}
}
}
/// Call FUNCTION once for each event binding in KEYMAP.
/// FUNCTION is called with two arguments: the event that is bound, and
/// the definition it is bound to. The event may be a character range.
///
/// If KEYMAP has a parent, the parent's bindings are included as well.
/// This works recursively: if the parent has itself a parent, then the
/// grandparent's bindings are also included and so on.
/// usage: (map-keymap FUNCTION KEYMAP)
#[lisp_fn(name = "map-keymap", c_name = "map_keymap", min = "2")]
pub fn map_keymap_lisp(function: LispObject, keymap: LispObject, sort_first: bool) -> LispObject {
if sort_first {
return call!(intern("map-keymap-sorted").into(), function, keymap);
}
unsafe {
map_keymap(
keymap,
Some(map_keymap_call),
function,
ptr::null_mut(),
true,
)
};
Qnil
}
/// Call FUN for every binding in MAP and stop at (and return) the parent.
/// FUN is called with 4 arguments: FUN (KEY, BINDING, ARGS, DATA).
#[no_mangle]
pub unsafe extern "C" fn map_keymap_internal(
map: LispObject,
fun: map_keymap_function_t,
args: LispObject,
data: *mut c_void,
) -> LispObject {
let map = map;
let tail = match map.into() {
None => Qnil,
Some((car, cdr)) => {
if car.eq(Qkeymap) {
cdr
} else {
map
}
}
};
let mut parent = tail;
for tail_cons in tail.iter_tails(LispConsEndChecks::off, LispConsCircularChecks::off) {
let binding = tail_cons.car();
if binding.eq(Qkeymap) {
break;
} else {
// An embedded parent.
if keymapp(binding) {
break;
}
if let Some((car, cdr)) = binding.into() | else if binding.is_vector() {
if let Some(binding_vec) = binding.as_vectorlike() {
for c in 0..binding_vec.pseudovector_size() {
map_keymap_item(fun, args, c.into(), aref(binding, c), data);
}
}
} else if binding.is_char_table() {
let saved = match fun {
Some(f) => make_save_funcptr_ptr_obj(Some(std::mem::transmute(f)), data, args),
None => make_save_funcptr_ptr_obj(None, data, args),
};
map_char_table(Some(map_keymap_char_table_item), Qnil, binding, saved);
}
}
parent = tail_cons.cdr();
}
parent
}
/// Call FUNCTION once for each event binding in KEYMAP.
/// FUNCTION is called with two arguments: the event that is bound, and
/// the definition it is bound to. The event may be a character range.
/// If KEYMAP has a parent, this function returns it without processing it.
#[lisp_fn(name = "map-keymap-internal", c_name = "map_keymap_internal")]
pub fn map_keymap_internal_lisp(function: LispObject, mut keymap: LispObject) -> LispObject {
keymap = get_keymap(keymap, true, true);
unsafe { map_keymap_internal(keymap, Some(map_keymap_call), function, ptr::null_mut()) }
}
/// Return the binding for command KEYS in current local keymap only.
/// KEYS is a string or vector, a sequence of keystrokes.
/// The binding is probably a symbol with a function definition.
/// If optional argument ACCEPT-DEFAULT is non-nil, recognize default
/// bindings; see the description of `lookup-key' for more details about this.
#[lisp_fn(min = "1")]
pub fn local_key_binding(keys: LispObject, accept_default: LispObject) -> LispObject {
let map = current_local_map();
if map.is_nil() {
Qnil
} else {
lookup_key(map, keys, accept_default)
}
}
/// Return current buffer's local keymap, or nil if it has none.
/// Normally the local keymap is set by the major mode with `use-local-map'.
#[lisp_fn]
pub fn current_local_map() -> LispObject {
ThreadState::current_buffer_unchecked().keymap_
}
/// Select KEYMAP as the local keymap.
/// If KEYMAP is nil, that means no local keymap.
#[lisp_fn]
pub fn use_local_map(mut keymap: LispObject) {
if !keymap.is_nil() {
let map = get_keymap(keymap, true, true);
keymap = map;
}
ThreadState::current_buffer_unchecked().keymap_ = keymap;
}
/// Return the binding for command KEYS in current global keymap only.
/// KEYS is a string or vector, a sequence of keystrokes.
/// The binding is probably a symbol with a function definition.
/// This function's return values are the same as those of `lookup-key'
/// (which see).
///
/// If optional argument ACCEPT-DEFAULT is non-nil, recognize default
/// bindings; see the description of `lookup-key' for more details about this.
#[lisp_fn(min = "1")]
pub fn global_key_binding(keys: LispObject, accept_default: LispObject) -> LispObject {
let map = current_global_map();
if map.is_nil() {
Qnil
} else {
lookup_key(map, keys, accept_default)
}
}
/// Return the current global keymap.
#[lisp_fn]
pub fn current_global_map() -> LispObject {
unsafe { _current_global_map }
}
/// Select KEYMAP as the global keymap.
#[lisp_fn]
pub fn use_global_map(keymap: LispObject) {
unsafe { _current_global_map = get_keymap(keymap, true, true) };
}
// Value is number if KEY is too long; nil if valid but has no definition.
// GC is possible in this function.
/// In keymap KEYMAP, look up key sequence KEY. Return the definition.
/// A value of nil means undefined. See doc of `define-key'
/// for kinds of definitions.
///
/// A number as value means KEY is "too long";
/// that is, characters or symbols in it except for the last one
/// fail to be a valid sequence of prefix characters in KEYMAP.
/// The number is how many characters at the front of KEY
/// it takes to reach a non-prefix key.
///
/// Normally, `lookup-key' ignores bindings for t, which act as default
/// bindings, used when nothing else in the keymap applies; this makes it
/// usable as a general function for probing keymaps. However, if the
/// third optional argument ACCEPT-DEFAULT is non-nil, `lookup-key' will
/// recognize the default bindings, just as `read-key-sequence' does.
#[lisp_fn(min = "2")]
pub fn lookup_key(keymap: LispObject, key: LispObject, accept_default: LispObject) -> LispObject {
let ok = accept_default.is_not_nil();
let mut keymap = get_keymap(keymap, true, true);
let length = key.as_vector_or_string_length() as EmacsInt;
if length == 0 {
return keymap;
}
let mut idx = 0;
loop {
let mut c = aref(key, idx);
idx += 1;
if c.is_cons() && lucid_event_type_list_p(c.into()) {
c = unsafe { Fevent_convert_list(c) };
}
// Turn the 8th bit of string chars into a meta modifier.
if let Some(k) = key.as_string() {
if let Some(x) = c.as_fixnum() {
let x = x as u32;
if x & 0x80 != 0 && !k.is_multibyte() {
c = ((x | char_bits::CHAR_META) & !0x80).into();
}
}
}
// Allow string since binding for `menu-bar-select-buffer'
// includes the buffer name in the key sequence.
if !(c.is_fixnum() || c.is_symbol() || c.is_cons() || c.is_string()) {
message_with_string!("Key sequence contains invalid event %s", c, true);
}
let cmd = unsafe { access_keymap(keymap, c, ok, false, true) };
if idx == length {
return cmd;
}
keymap = get_keymap(cmd, false, true);
if !keymap.is_cons() {
return idx.into();
}
unsafe {
maybe_quit();
};
}
}
/// Define COMMAND as a prefix command. COMMAND should be a symbol.
/// A new sparse keymap is stored as COMMAND's function definition and its
/// value.
/// This prepares COMMAND for use as a prefix key's binding.
/// If a second optional argument MAPVAR is given, it should be a symbol.
/// The map is then stored as MAPVAR's value instead of as COMMAND's
/// value; but COMMAND is still defined as a function.
/// The third optional argument NAME, if given, supplies a menu name
/// string for the map. This is required to use the keymap as a menu.
/// This function returns COMMAND.
#[lisp_fn(min = "1")]
pub fn define_prefix_command(
command: LispSymbolRef,
mapvar: LispObject,
name: LispObject,
) -> LispSymbolRef {
let map = make_sparse_keymap(name);
fset(command, map);
if mapvar.is_not_nil() {
set(mapvar.into(), map);
} else {
set(command, map);
}
command
}
/// Construct and return a new sparse keymap.
/// Its car is `keymap' and its cdr is an alist of (CHAR . DEFINITION),
/// which binds the character CHAR to DEFINITION, or (SYMBOL . DEFINITION),
/// which binds the function key or mouse event SYMBOL to DEFINITION.
/// Initially the alist is nil.
///
/// The optional arg STRING supplies a menu name for the keymap
/// in case you use it as a menu with `x-popup-menu'.
#[lisp_fn(min = "0")]
pub fn make_sparse_keymap(string: LispObject) -> LispObject {
if string.is_not_nil() {
let s = if unsafe { globals.Vpurify_flag }.is_not_nil() {
unsafe { Fpurecopy(string) }
} else {
string
};
list!(Qkeymap, s)
} else {
list!(Qkeymap)
}
}
#[no_mangle]
pub extern "C" fn describe_vector_princ(elt: LispObject, fun: LispObject) {
indent_to(16, 1.into());
call!(fun, elt);
unsafe { Fterpri(Qnil, Qnil) };
}
/// Insert a description of contents of VECTOR.
/// This is text showing the elements of vector matched against indices.
/// DESCRIBER is the output function used; nil means use `princ'.
#[lisp_fn(min = "1", name = "describe-vector", c_name = "describe_vector")]
pub fn describe_vector_lisp(vector: LispObject, mut describer: LispObject) {
if describer.is_nil() {
describer = intern("princ").into();
}
unsafe { specbind(Qstandard_output, current_buffer()) };
if !(vector.is_vector() || vector.is_char_table()) {
wrong_type!(Qvector_or_char_table_p, vector);
}
let count = c_specpdl_index();
unsafe {
describe_vector(
vector,
Qnil,
describer,
Some(describe_vector_princ),
false,
Qnil,
Qnil,
false,
false,
)
};
unbind_to(count, Qnil);
}
#[no_mangle]
pub extern "C" fn copy_keymap_1(chartable: LispObject, idx: LispObject, elt: LispObject) {
unsafe { Fset_char_table_range(chartable, idx, copy_keymap_item(elt)) };
}
/// Return a copy of the keymap KEYMAP.
///
/// Note that this is almost never needed. If you want a keymap that's like
/// another yet with a few changes, you should use map inheritance rather
/// than copying. I.e. something like:
///
/// (let ((map (make-sparse-keymap)))
/// (set-keymap-parent map <theirmap>)
/// (define-key map ...)
/// ...)
///
/// After performing `copy-keymap', the copy starts out with the same definitions
/// of KEYMAP, but changing either the copy or KEYMAP does not affect the other.
/// Any key definitions that are subkeymaps are recursively copied.
/// However, a key definition which is a symbol whose definition is a keymap
/// is not copied.
#[lisp_fn]
pub fn copy_keymap(keymap: LispObject) -> LispObject {
let keymap = get_keymap(keymap, true, false);
let mut tail = list!(Qkeymap);
let copy = tail;
let (_, mut keymap) = keymap.into(); // Skip the `keymap' symbol.
while let Some((mut elt, kmd)) = keymap.into() {
if elt.eq(Qkeymap) {
break;
}
if elt.is_char_table() {
elt = unsafe { Fcopy_sequence(elt) };
unsafe { map_char_table(Some(copy_keymap_1), Qnil, elt, elt) };
} else if let Some(v) = elt.as_vector() {
elt = unsafe { Fcopy_sequence(elt) };
let mut v2 = elt.as_vector().unwrap();
for (i, obj) in v.iter().enumerate() {
v2.set(i, unsafe { copy_keymap_item(obj) });
}
} else if let Some((front, back)) = elt.into() {
if front.eq(Qkeymap) {
// This is a sub keymap
elt = copy_keymap(elt);
} else {
elt = (front, unsafe { copy_keymap_item(back) }).into();
}
}
setcdr(tail.into(), list!(elt));
tail = LispCons::from(tail).cdr();
keymap = kmd;
}
setcdr(tail.into(), keymap);
copy
}
// GC is possible in this funtion if it autoloads a keymap.
/// Return the binding for command KEY in current keymaps.
/// KEY is a string or vector, a sequence of keystrokes.
/// The binding is probably a symbol with a function definition.
///
/// Normally, `key-binding' ignores bindings for t, which act as default
/// bindings, used when nothing else in the keymap applies; this makes it
/// usable as a general function for probing keymaps. However, if the
/// optional second argument ACCEPT-DEFAULT is non-nil, `key-binding' does
/// recognize the default bindings, just as `read-key-sequence' does.
///
/// Like the normal command loop, `key-binding' will remap the command
/// resulting from looking up KEY by looking up the command in the
/// current keymaps. However, if the optional third argument NO-REMAP
/// is non-nil, `key-binding' returns the unmapped command.
///
/// If KEY is a key sequence initiated with the mouse, the used keymaps
/// will depend on the clicked mouse position with regard to the buffer
/// and possible local keymaps on strings.
///
/// If the optional argument POSITION is non-nil, it specifies a mouse
/// position as returned by `event-start' and `event-end', and the lookup
/// occurs in the keymaps associated with it instead of KEY. It can also
/// be a number or marker, in which case the keymap properties at the
/// specified buffer position instead of point are used.
#[lisp_fn(min = "1")]
pub fn key_binding(
key: LispObject,
accept_default: bool,
no_remap: bool,
mut position: LispObject,
) -> LispObject {
if key.is_vector() && position.is_nil() {
let key = key.force_vector();
if key.len() == 0 {
return Qnil;
}
// Mouse events may have a symbolic prefix indicating the scrollbar or modeline
let idx = if key.get(0).is_symbol() && key.len() > 1 {
1
} else {
0
};
let event: keyboard::Event = key.get(idx).into();
// Ignore locations without event data
if event.has_data() {
let kind = event.head_kind();
if kind == Qmouse_click {
position = event.start()
}
}
}
let value = lookup_key(
(Qkeymap, unsafe { Fcurrent_active_maps(Qt, position) }).into(),
key,
accept_default.into(),
);
if value.is_nil() || value.is_integer() {
return Qnil;
}
// If the result of the ordinary keymap lookup is an interactive
// command, look for a key binding (i.e. remapping) for that command.
if !no_remap && value.is_symbol() {
let remap = unsafe { Fcommand_remapping(value, position, Qnil) };
if remap.is_not_nil() {
return remap;
}
}
value
}
include!(concat!(env!("OUT_DIR"), "/keymap_exports.rs"));
| {
map_keymap_item(fun, args, car, cdr, data);
} | conditional_block |
keymap.rs | //! Keymap support
use std;
use std::ptr;
use libc::c_void;
use remacs_macros::lisp_fn;
use crate::{
buffers::current_buffer,
data::{aref, fset, indirect_function, set},
eval::{autoload_do_load, unbind_to},
indent::indent_to,
keyboard,
keyboard::lucid_event_type_list_p,
lisp::LispObject,
lists::{nth, setcdr},
lists::{LispCons, LispConsCircularChecks, LispConsEndChecks},
obarray::intern,
remacs_sys::{
access_keymap, copy_keymap_item, describe_vector, make_save_funcptr_ptr_obj,
map_char_table, map_keymap_call, map_keymap_char_table_item, map_keymap_function_t,
map_keymap_item, maybe_quit, specbind,
},
remacs_sys::{char_bits, current_global_map as _current_global_map, globals, EmacsInt},
remacs_sys::{
Fcommand_remapping, Fcopy_sequence, Fcurrent_active_maps, Fevent_convert_list,
Fmake_char_table, Fpurecopy, Fset_char_table_range, Fterpri,
},
remacs_sys::{
Qautoload, Qkeymap, Qkeymapp, Qmouse_click, Qnil, Qstandard_output, Qt,
Qvector_or_char_table_p,
},
symbols::LispSymbolRef,
threads::{c_specpdl_index, ThreadState},
};
pub fn Ctl(c: char) -> i32 {
(c as i32) & 0x1f
}
// Hash table used to cache a reverse-map to speed up calls to where-is.
declare_GC_protected_static!(where_is_cache, Qnil);
/// Allows the C code to get the value of `where_is_cache`
#[no_mangle]
pub extern "C" fn get_where_is_cache() -> LispObject {
unsafe { where_is_cache }
}
/// Allows the C code to set the value of `where_is_cache`
#[no_mangle]
pub extern "C" fn set_where_is_cache(val: LispObject) {
unsafe {
where_is_cache = val;
}
}
// Which keymaps are reverse-stored in the cache.
declare_GC_protected_static!(where_is_cache_keymaps, Qt);
/// Allows the C code to get the value of `where_is_cache_keymaps`
#[no_mangle]
pub extern "C" fn get_where_is_cache_keymaps() -> LispObject {
unsafe { where_is_cache_keymaps }
}
/// Allows the C code to set the value of `where_is_cache_keymaps`
#[no_mangle]
pub extern "C" fn set_where_is_cache_keymaps(val: LispObject) {
unsafe {
where_is_cache_keymaps = val;
}
}
/// Check that OBJECT is a keymap (after dereferencing through any
/// symbols). If it is, return it.
///
/// If AUTOLOAD and if OBJECT is a symbol whose function value
/// is an autoload form, do the autoload and try again.
/// If AUTOLOAD, callers must assume GC is possible.
///
/// `ERROR_IF_NOT_KEYMAP` controls how we respond if OBJECT isn't a keymap.
/// If `ERROR_IF_NOT_KEYMAP`, signal an error; otherwise,
/// just return Qnil.
///
/// Note that most of the time, we don't want to pursue autoloads.
/// Functions like `Faccessible_keymaps` which scan entire keymap trees
/// shouldn't load every autoloaded keymap. I'm not sure about this,
/// but it seems to me that only `read_key_sequence`, `Flookup_key`, and
/// `Fdefine_key` should cause keymaps to be autoloaded.
///
/// This function can GC when AUTOLOAD is true, because it calls
/// `Fautoload_do_load` which can GC.
#[no_mangle]
pub extern "C" fn get_keymap(
object: LispObject,
error_if_not_keymap: bool,
autoload: bool,
) -> LispObject {
let object = object;
let mut autoload_retry = true;
while autoload_retry {
autoload_retry = false;
if object.is_nil() {
break;
}
if let Some((car, _)) = object.into() {
if car.eq(Qkeymap) {
return object;
}
}
let tem = indirect_function(object);
if let Some((car, _)) = tem.into() {
if car.eq(Qkeymap) {
return tem;
}
// Should we do an autoload? Autoload forms for keymaps have
// Qkeymap as their fifth element.
if (autoload || !error_if_not_keymap) && car.eq(Qautoload) && object.is_symbol() {
let tail = nth(4, tem);
if tail.eq(Qkeymap) {
if autoload {
autoload_do_load(tem, object, Qnil);
autoload_retry = true;
} else {
return object;
}
}
}
}
}
if error_if_not_keymap {
wrong_type!(Qkeymapp, object);
}
Qnil |
/// Construct and return a new keymap, of the form (keymap CHARTABLE . ALIST).
/// CHARTABLE is a char-table that holds the bindings for all characters
/// without modifiers. All entries in it are initially nil, meaning
/// "command undefined". ALIST is an assoc-list which holds bindings for
/// function keys, mouse events, and any other things that appear in the
/// input stream. Initially, ALIST is nil.
///
/// The optional arg STRING supplies a menu name for the keymap
/// in case you use it as a menu with `x-popup-menu'.
#[lisp_fn(min = "0")]
pub fn make_keymap(string: LispObject) -> (LispObject, (LispObject, LispObject)) {
let tail: LispObject = if string.is_not_nil() {
list!(string)
} else {
Qnil
};
let char_table = unsafe { Fmake_char_table(Qkeymap, Qnil) };
(Qkeymap, (char_table, tail))
}
/// Return t if OBJECT is a keymap.
///
/// A keymap is a list (keymap . ALIST),
/// or a symbol whose function definition is itself a keymap.
/// ALIST elements look like (CHAR . DEFN) or (SYMBOL . DEFN);
/// a vector of densely packed bindings for small character codes
/// is also allowed as an element.
#[lisp_fn]
pub fn keymapp(object: LispObject) -> bool {
let map = get_keymap(object, false, false);
map.is_not_nil()
}
/// Return the parent map of KEYMAP, or nil if it has none.
/// We assume that KEYMAP is a valid keymap.
#[no_mangle]
pub extern "C" fn keymap_parent(keymap: LispObject, autoload: bool) -> LispObject {
let map = get_keymap(keymap, true, autoload);
let mut current = Qnil;
for elt in map.iter_tails(LispConsEndChecks::off, LispConsCircularChecks::off) {
current = elt.cdr();
if keymapp(current) {
return current;
}
}
get_keymap(current, false, autoload)
}
/// Return the parent keymap of KEYMAP.
/// If KEYMAP has no parent, return nil.
#[lisp_fn(name = "keymap-parent", c_name = "keymap_parent")]
pub fn keymap_parent_lisp(keymap: LispObject) -> LispObject {
keymap_parent(keymap, true)
}
/// Check whether MAP is one of MAPS parents.
#[no_mangle]
pub extern "C" fn keymap_memberp(map: LispObject, maps: LispObject) -> bool {
let map = map;
let mut maps = maps;
if map.is_nil() {
return false;
}
while keymapp(maps) && !map.eq(maps) {
maps = keymap_parent(maps, false);
}
map.eq(maps)
}
/// Modify KEYMAP to set its parent map to PARENT.
/// Return PARENT. PARENT should be nil or another keymap.
#[lisp_fn]
pub fn set_keymap_parent(keymap: LispObject, parent: LispObject) -> LispObject {
// Flush any reverse-map cache
unsafe {
where_is_cache = Qnil;
where_is_cache_keymaps = Qt;
}
let mut parent = parent;
let keymap = get_keymap(keymap, true, true);
if parent.is_not_nil() {
parent = get_keymap(parent, true, false);
// Check for cycles
if keymap_memberp(keymap, parent) {
error!("Cyclic keymap inheritance");
}
}
// Skip past the initial element 'keymap'.
let mut prev = LispCons::from(keymap);
let mut list;
loop {
list = prev.cdr();
// If there is a parent keymap here, replace it.
// If we came to the end, add the parent in PREV.
match list.as_cons() {
None => break,
Some(cons) => {
if keymapp(list) {
break;
} else {
prev = cons;
}
}
}
}
prev.check_impure();
prev.set_cdr(parent);
parent
}
/// Return the prompt-string of a keymap MAP.
/// If non-nil, the prompt is shown in the echo-area
/// when reading a key-sequence to be looked-up in this keymap.
#[lisp_fn]
pub fn keymap_prompt(map: LispObject) -> LispObject {
let map = get_keymap(map, false, false);
for elt in map.iter_cars(LispConsEndChecks::off, LispConsCircularChecks::off) {
let mut tem = elt;
if tem.is_string() {
return tem;
} else if keymapp(tem) {
tem = keymap_prompt(tem);
if tem.is_not_nil() {
return tem;
}
}
}
Qnil
}
/// Same as `map_keymap_internal`, but traverses parent keymaps as well.
/// AUTOLOAD indicates that autoloaded keymaps should be loaded.
#[no_mangle]
pub unsafe extern "C" fn map_keymap(
map: LispObject,
fun: map_keymap_function_t,
args: LispObject,
data: *mut c_void,
autoload: bool,
) {
let mut map = get_keymap(map, true, autoload);
while map.is_cons() {
if let Some((car, cdr)) = map.into() {
if keymapp(car) {
map_keymap(car, fun, args, data, autoload);
map = cdr;
} else {
map = map_keymap_internal(map, fun, args, data);
}
}
if !map.is_cons() {
map = get_keymap(map, false, autoload);
}
}
}
/// Call FUNCTION once for each event binding in KEYMAP.
/// FUNCTION is called with two arguments: the event that is bound, and
/// the definition it is bound to. The event may be a character range.
///
/// If KEYMAP has a parent, the parent's bindings are included as well.
/// This works recursively: if the parent has itself a parent, then the
/// grandparent's bindings are also included and so on.
/// usage: (map-keymap FUNCTION KEYMAP)
#[lisp_fn(name = "map-keymap", c_name = "map_keymap", min = "2")]
pub fn map_keymap_lisp(function: LispObject, keymap: LispObject, sort_first: bool) -> LispObject {
if sort_first {
return call!(intern("map-keymap-sorted").into(), function, keymap);
}
unsafe {
map_keymap(
keymap,
Some(map_keymap_call),
function,
ptr::null_mut(),
true,
)
};
Qnil
}
/// Call FUN for every binding in MAP and stop at (and return) the parent.
/// FUN is called with 4 arguments: FUN (KEY, BINDING, ARGS, DATA).
#[no_mangle]
pub unsafe extern "C" fn map_keymap_internal(
map: LispObject,
fun: map_keymap_function_t,
args: LispObject,
data: *mut c_void,
) -> LispObject {
let map = map;
let tail = match map.into() {
None => Qnil,
Some((car, cdr)) => {
if car.eq(Qkeymap) {
cdr
} else {
map
}
}
};
let mut parent = tail;
for tail_cons in tail.iter_tails(LispConsEndChecks::off, LispConsCircularChecks::off) {
let binding = tail_cons.car();
if binding.eq(Qkeymap) {
break;
} else {
// An embedded parent.
if keymapp(binding) {
break;
}
if let Some((car, cdr)) = binding.into() {
map_keymap_item(fun, args, car, cdr, data);
} else if binding.is_vector() {
if let Some(binding_vec) = binding.as_vectorlike() {
for c in 0..binding_vec.pseudovector_size() {
map_keymap_item(fun, args, c.into(), aref(binding, c), data);
}
}
} else if binding.is_char_table() {
let saved = match fun {
Some(f) => make_save_funcptr_ptr_obj(Some(std::mem::transmute(f)), data, args),
None => make_save_funcptr_ptr_obj(None, data, args),
};
map_char_table(Some(map_keymap_char_table_item), Qnil, binding, saved);
}
}
parent = tail_cons.cdr();
}
parent
}
/// Call FUNCTION once for each event binding in KEYMAP.
/// FUNCTION is called with two arguments: the event that is bound, and
/// the definition it is bound to. The event may be a character range.
/// If KEYMAP has a parent, this function returns it without processing it.
#[lisp_fn(name = "map-keymap-internal", c_name = "map_keymap_internal")]
pub fn map_keymap_internal_lisp(function: LispObject, mut keymap: LispObject) -> LispObject {
keymap = get_keymap(keymap, true, true);
unsafe { map_keymap_internal(keymap, Some(map_keymap_call), function, ptr::null_mut()) }
}
/// Return the binding for command KEYS in current local keymap only.
/// KEYS is a string or vector, a sequence of keystrokes.
/// The binding is probably a symbol with a function definition.
/// If optional argument ACCEPT-DEFAULT is non-nil, recognize default
/// bindings; see the description of `lookup-key' for more details about this.
#[lisp_fn(min = "1")]
pub fn local_key_binding(keys: LispObject, accept_default: LispObject) -> LispObject {
let map = current_local_map();
if map.is_nil() {
Qnil
} else {
lookup_key(map, keys, accept_default)
}
}
/// Return current buffer's local keymap, or nil if it has none.
/// Normally the local keymap is set by the major mode with `use-local-map'.
#[lisp_fn]
pub fn current_local_map() -> LispObject {
ThreadState::current_buffer_unchecked().keymap_
}
/// Select KEYMAP as the local keymap.
/// If KEYMAP is nil, that means no local keymap.
#[lisp_fn]
pub fn use_local_map(mut keymap: LispObject) {
if !keymap.is_nil() {
let map = get_keymap(keymap, true, true);
keymap = map;
}
ThreadState::current_buffer_unchecked().keymap_ = keymap;
}
/// Return the binding for command KEYS in current global keymap only.
/// KEYS is a string or vector, a sequence of keystrokes.
/// The binding is probably a symbol with a function definition.
/// This function's return values are the same as those of `lookup-key'
/// (which see).
///
/// If optional argument ACCEPT-DEFAULT is non-nil, recognize default
/// bindings; see the description of `lookup-key' for more details about this.
#[lisp_fn(min = "1")]
pub fn global_key_binding(keys: LispObject, accept_default: LispObject) -> LispObject {
let map = current_global_map();
if map.is_nil() {
Qnil
} else {
lookup_key(map, keys, accept_default)
}
}
/// Return the current global keymap.
#[lisp_fn]
pub fn current_global_map() -> LispObject {
unsafe { _current_global_map }
}
/// Select KEYMAP as the global keymap.
#[lisp_fn]
pub fn use_global_map(keymap: LispObject) {
unsafe { _current_global_map = get_keymap(keymap, true, true) };
}
// Value is number if KEY is too long; nil if valid but has no definition.
// GC is possible in this function.
/// In keymap KEYMAP, look up key sequence KEY. Return the definition.
/// A value of nil means undefined. See doc of `define-key'
/// for kinds of definitions.
///
/// A number as value means KEY is "too long";
/// that is, characters or symbols in it except for the last one
/// fail to be a valid sequence of prefix characters in KEYMAP.
/// The number is how many characters at the front of KEY
/// it takes to reach a non-prefix key.
///
/// Normally, `lookup-key' ignores bindings for t, which act as default
/// bindings, used when nothing else in the keymap applies; this makes it
/// usable as a general function for probing keymaps. However, if the
/// third optional argument ACCEPT-DEFAULT is non-nil, `lookup-key' will
/// recognize the default bindings, just as `read-key-sequence' does.
#[lisp_fn(min = "2")]
pub fn lookup_key(keymap: LispObject, key: LispObject, accept_default: LispObject) -> LispObject {
let ok = accept_default.is_not_nil();
let mut keymap = get_keymap(keymap, true, true);
let length = key.as_vector_or_string_length() as EmacsInt;
if length == 0 {
return keymap;
}
let mut idx = 0;
loop {
let mut c = aref(key, idx);
idx += 1;
if c.is_cons() && lucid_event_type_list_p(c.into()) {
c = unsafe { Fevent_convert_list(c) };
}
// Turn the 8th bit of string chars into a meta modifier.
if let Some(k) = key.as_string() {
if let Some(x) = c.as_fixnum() {
let x = x as u32;
if x & 0x80 != 0 && !k.is_multibyte() {
c = ((x | char_bits::CHAR_META) & !0x80).into();
}
}
}
// Allow string since binding for `menu-bar-select-buffer'
// includes the buffer name in the key sequence.
if !(c.is_fixnum() || c.is_symbol() || c.is_cons() || c.is_string()) {
message_with_string!("Key sequence contains invalid event %s", c, true);
}
let cmd = unsafe { access_keymap(keymap, c, ok, false, true) };
if idx == length {
return cmd;
}
keymap = get_keymap(cmd, false, true);
if !keymap.is_cons() {
return idx.into();
}
unsafe {
maybe_quit();
};
}
}
/// Define COMMAND as a prefix command. COMMAND should be a symbol.
/// A new sparse keymap is stored as COMMAND's function definition and its
/// value.
/// This prepares COMMAND for use as a prefix key's binding.
/// If a second optional argument MAPVAR is given, it should be a symbol.
/// The map is then stored as MAPVAR's value instead of as COMMAND's
/// value; but COMMAND is still defined as a function.
/// The third optional argument NAME, if given, supplies a menu name
/// string for the map. This is required to use the keymap as a menu.
/// This function returns COMMAND.
#[lisp_fn(min = "1")]
pub fn define_prefix_command(
command: LispSymbolRef,
mapvar: LispObject,
name: LispObject,
) -> LispSymbolRef {
let map = make_sparse_keymap(name);
fset(command, map);
if mapvar.is_not_nil() {
set(mapvar.into(), map);
} else {
set(command, map);
}
command
}
/// Construct and return a new sparse keymap.
/// Its car is `keymap' and its cdr is an alist of (CHAR . DEFINITION),
/// which binds the character CHAR to DEFINITION, or (SYMBOL . DEFINITION),
/// which binds the function key or mouse event SYMBOL to DEFINITION.
/// Initially the alist is nil.
///
/// The optional arg STRING supplies a menu name for the keymap
/// in case you use it as a menu with `x-popup-menu'.
#[lisp_fn(min = "0")]
pub fn make_sparse_keymap(string: LispObject) -> LispObject {
if string.is_not_nil() {
let s = if unsafe { globals.Vpurify_flag }.is_not_nil() {
unsafe { Fpurecopy(string) }
} else {
string
};
list!(Qkeymap, s)
} else {
list!(Qkeymap)
}
}
#[no_mangle]
pub extern "C" fn describe_vector_princ(elt: LispObject, fun: LispObject) {
indent_to(16, 1.into());
call!(fun, elt);
unsafe { Fterpri(Qnil, Qnil) };
}
/// Insert a description of contents of VECTOR.
/// This is text showing the elements of vector matched against indices.
/// DESCRIBER is the output function used; nil means use `princ'.
#[lisp_fn(min = "1", name = "describe-vector", c_name = "describe_vector")]
pub fn describe_vector_lisp(vector: LispObject, mut describer: LispObject) {
    // Default output function is `princ'.
    if describer.is_nil() {
        describer = intern("princ").into();
    }
    // BUGFIX: capture the specpdl depth BEFORE `specbind`, so the final
    // `unbind_to` actually removes the `standard-output` binding again.
    // Previously the index was taken after the bind, leaking it.
    let count = c_specpdl_index();
    // Redirect `standard-output` to the current buffer for the duration.
    unsafe { specbind(Qstandard_output, current_buffer()) };
    if !(vector.is_vector() || vector.is_char_table()) {
        wrong_type!(Qvector_or_char_table_p, vector);
    }
    unsafe {
        describe_vector(
            vector,
            Qnil,
            describer,
            Some(describe_vector_princ),
            false,
            Qnil,
            Qnil,
            false,
            false,
        )
    };
    // Pop the `standard-output` binding installed above.
    unbind_to(count, Qnil);
}
#[no_mangle]
pub extern "C" fn copy_keymap_1(chartable: LispObject, idx: LispObject, elt: LispObject) {
unsafe { Fset_char_table_range(chartable, idx, copy_keymap_item(elt)) };
}
/// Return a copy of the keymap KEYMAP.
///
/// Note that this is almost never needed. If you want a keymap that's like
/// another yet with a few changes, you should use map inheritance rather
/// than copying. I.e. something like:
///
/// (let ((map (make-sparse-keymap)))
/// (set-keymap-parent map <theirmap>)
/// (define-key map ...)
/// ...)
///
/// After performing `copy-keymap', the copy starts out with the same definitions
/// of KEYMAP, but changing either the copy or KEYMAP does not affect the other.
/// Any key definitions that are subkeymaps are recursively copied.
/// However, a key definition which is a symbol whose definition is a keymap
/// is not copied.
#[lisp_fn]
pub fn copy_keymap(keymap: LispObject) -> LispObject {
let keymap = get_keymap(keymap, true, false);
let mut tail = list!(Qkeymap);
let copy = tail;
let (_, mut keymap) = keymap.into(); // Skip the `keymap' symbol.
while let Some((mut elt, kmd)) = keymap.into() {
if elt.eq(Qkeymap) {
break;
}
if elt.is_char_table() {
elt = unsafe { Fcopy_sequence(elt) };
unsafe { map_char_table(Some(copy_keymap_1), Qnil, elt, elt) };
} else if let Some(v) = elt.as_vector() {
elt = unsafe { Fcopy_sequence(elt) };
let mut v2 = elt.as_vector().unwrap();
for (i, obj) in v.iter().enumerate() {
v2.set(i, unsafe { copy_keymap_item(obj) });
}
} else if let Some((front, back)) = elt.into() {
if front.eq(Qkeymap) {
// This is a sub keymap
elt = copy_keymap(elt);
} else {
elt = (front, unsafe { copy_keymap_item(back) }).into();
}
}
setcdr(tail.into(), list!(elt));
tail = LispCons::from(tail).cdr();
keymap = kmd;
}
setcdr(tail.into(), keymap);
copy
}
// GC is possible in this funtion if it autoloads a keymap.
/// Return the binding for command KEY in current keymaps.
/// KEY is a string or vector, a sequence of keystrokes.
/// The binding is probably a symbol with a function definition.
///
/// Normally, `key-binding' ignores bindings for t, which act as default
/// bindings, used when nothing else in the keymap applies; this makes it
/// usable as a general function for probing keymaps. However, if the
/// optional second argument ACCEPT-DEFAULT is non-nil, `key-binding' does
/// recognize the default bindings, just as `read-key-sequence' does.
///
/// Like the normal command loop, `key-binding' will remap the command
/// resulting from looking up KEY by looking up the command in the
/// current keymaps. However, if the optional third argument NO-REMAP
/// is non-nil, `key-binding' returns the unmapped command.
///
/// If KEY is a key sequence initiated with the mouse, the used keymaps
/// will depend on the clicked mouse position with regard to the buffer
/// and possible local keymaps on strings.
///
/// If the optional argument POSITION is non-nil, it specifies a mouse
/// position as returned by `event-start' and `event-end', and the lookup
/// occurs in the keymaps associated with it instead of KEY. It can also
/// be a number or marker, in which case the keymap properties at the
/// specified buffer position instead of point are used.
#[lisp_fn(min = "1")]
pub fn key_binding(
    key: LispObject,
    accept_default: bool,
    no_remap: bool,
    mut position: LispObject,
) -> LispObject {
    // With no explicit POSITION, try to derive one from a mouse event
    // contained in a vector KEY, so the position-dependent keymaps apply.
    if key.is_vector() && position.is_nil() {
        let key = key.force_vector();
        if key.len() == 0 {
            return Qnil;
        }
        // Mouse events may have a symbolic prefix indicating the scrollbar or modeline
        let idx = if key.get(0).is_symbol() && key.len() > 1 {
            1
        } else {
            0
        };
        let event: keyboard::Event = key.get(idx).into();
        // Ignore locations without event data
        if event.has_data() {
            let kind = event.head_kind();
            if kind == Qmouse_click {
                position = event.start()
            }
        }
    }
    // Look KEY up in a composite keymap made of all currently active maps.
    let value = lookup_key(
        (Qkeymap, unsafe { Fcurrent_active_maps(Qt, position) }).into(),
        key,
        accept_default.into(),
    );
    // Integer result means "KEY too long" (see `lookup-key'); treat as unbound.
    if value.is_nil() || value.is_integer() {
        return Qnil;
    }
    // If the result of the ordinary keymap lookup is an interactive
    // command, look for a key binding (i.e. remapping) for that command.
    if !no_remap && value.is_symbol() {
        let remap = unsafe { Fcommand_remapping(value, position, Qnil) };
        if remap.is_not_nil() {
            return remap;
        }
    }
    value
}
include!(concat!(env!("OUT_DIR"), "/keymap_exports.rs")); | } | random_line_split |
keymap.rs | //! Keymap support
use std;
use std::ptr;
use libc::c_void;
use remacs_macros::lisp_fn;
use crate::{
buffers::current_buffer,
data::{aref, fset, indirect_function, set},
eval::{autoload_do_load, unbind_to},
indent::indent_to,
keyboard,
keyboard::lucid_event_type_list_p,
lisp::LispObject,
lists::{nth, setcdr},
lists::{LispCons, LispConsCircularChecks, LispConsEndChecks},
obarray::intern,
remacs_sys::{
access_keymap, copy_keymap_item, describe_vector, make_save_funcptr_ptr_obj,
map_char_table, map_keymap_call, map_keymap_char_table_item, map_keymap_function_t,
map_keymap_item, maybe_quit, specbind,
},
remacs_sys::{char_bits, current_global_map as _current_global_map, globals, EmacsInt},
remacs_sys::{
Fcommand_remapping, Fcopy_sequence, Fcurrent_active_maps, Fevent_convert_list,
Fmake_char_table, Fpurecopy, Fset_char_table_range, Fterpri,
},
remacs_sys::{
Qautoload, Qkeymap, Qkeymapp, Qmouse_click, Qnil, Qstandard_output, Qt,
Qvector_or_char_table_p,
},
symbols::LispSymbolRef,
threads::{c_specpdl_index, ThreadState},
};
/// Classic "control character" transform: keep only the low five bits of
/// the character code, so e.g. `Ctl('a')` and `Ctl('A')` both yield 0x01.
pub fn Ctl(c: char) -> i32 {
    let code = c as i32;
    code & 0x1f
}
// Hash table used to cache a reverse-map to speed up calls to where-is.
declare_GC_protected_static!(where_is_cache, Qnil);
/// Allows the C code to get the value of `where_is_cache`
#[no_mangle]
pub extern "C" fn get_where_is_cache() -> LispObject {
    // `where_is_cache` is a GC-protected mutable static, hence the `unsafe` read.
    unsafe { where_is_cache }
}
/// Allows the C code to set the value of `where_is_cache`
#[no_mangle]
pub extern "C" fn set_where_is_cache(val: LispObject) {
    // Writing the GC-protected mutable static requires `unsafe`; the C core
    // accesses it the same way, with no synchronization.
    unsafe {
        where_is_cache = val;
    }
}
// Which keymaps are reverse-stored in the cache.
declare_GC_protected_static!(where_is_cache_keymaps, Qt);
/// Allows the C code to get the value of `where_is_cache_keymaps`
#[no_mangle]
pub extern "C" fn get_where_is_cache_keymaps() -> LispObject {
    // Companion static to `where_is_cache`: records which keymaps are cached.
    unsafe { where_is_cache_keymaps }
}
/// Allows the C code to set the value of `where_is_cache_keymaps`
#[no_mangle]
pub extern "C" fn set_where_is_cache_keymaps(val: LispObject) {
    // Mutable-static write; mirrors set_where_is_cache above.
    unsafe {
        where_is_cache_keymaps = val;
    }
}
/// Check that OBJECT is a keymap (after dereferencing through any
/// symbols). If it is, return it.
///
/// If AUTOLOAD and if OBJECT is a symbol whose function value
/// is an autoload form, do the autoload and try again.
/// If AUTOLOAD, callers must assume GC is possible.
///
/// `ERROR_IF_NOT_KEYMAP` controls how we respond if OBJECT isn't a keymap.
/// If `ERROR_IF_NOT_KEYMAP`, signal an error; otherwise,
/// just return Qnil.
///
/// Note that most of the time, we don't want to pursue autoloads.
/// Functions like `Faccessible_keymaps` which scan entire keymap trees
/// shouldn't load every autoloaded keymap. I'm not sure about this,
/// but it seems to me that only `read_key_sequence`, `Flookup_key`, and
/// `Fdefine_key` should cause keymaps to be autoloaded.
///
/// This function can GC when AUTOLOAD is true, because it calls
/// `Fautoload_do_load` which can GC.
#[no_mangle]
pub extern "C" fn get_keymap(
    object: LispObject,
    error_if_not_keymap: bool,
    autoload: bool,
) -> LispObject {
    // Removed a dead `let object = object;` self-rebinding: OBJECT is never
    // reassigned, so the shadow was pure noise.
    //
    // The loop exists only to retry once after a successful autoload;
    // every other path exits on the first pass.
    let mut autoload_retry = true;
    while autoload_retry {
        autoload_retry = false;
        if object.is_nil() {
            break;
        }
        // A cons whose car is `keymap' is already a keymap.
        if let Some((car, _)) = object.into() {
            if car.eq(Qkeymap) {
                return object;
            }
        }
        // Follow symbol function indirection, then test again.
        let tem = indirect_function(object);
        if let Some((car, _)) = tem.into() {
            if car.eq(Qkeymap) {
                return tem;
            }
            // Should we do an autoload? Autoload forms for keymaps have
            // Qkeymap as their fifth element.
            if (autoload || !error_if_not_keymap) && car.eq(Qautoload) && object.is_symbol() {
                let tail = nth(4, tem);
                if tail.eq(Qkeymap) {
                    if autoload {
                        // May GC; on success re-run the whole check.
                        autoload_do_load(tem, object, Qnil);
                        autoload_retry = true;
                    } else {
                        return object;
                    }
                }
            }
        }
    }
    if error_if_not_keymap {
        wrong_type!(Qkeymapp, object);
    }
    Qnil
}
/// Construct and return a new keymap, of the form (keymap CHARTABLE . ALIST).
/// CHARTABLE is a char-table that holds the bindings for all characters
/// without modifiers. All entries in it are initially nil, meaning
/// "command undefined". ALIST is an assoc-list which holds bindings for
/// function keys, mouse events, and any other things that appear in the
/// input stream. Initially, ALIST is nil.
///
/// The optional arg STRING supplies a menu name for the keymap
/// in case you use it as a menu with `x-popup-menu'.
#[lisp_fn(min = "0")]
pub fn make_keymap(string: LispObject) -> (LispObject, (LispObject, LispObject)) {
    // Optional menu name becomes the tail of the keymap list; nil otherwise.
    let menu_name: LispObject = if string.is_nil() {
        Qnil
    } else {
        list!(string)
    };
    // Dense per-character bindings live in a fresh char-table, all nil.
    let char_table = unsafe { Fmake_char_table(Qkeymap, Qnil) };
    (Qkeymap, (char_table, menu_name))
}
/// Return t if OBJECT is a keymap.
///
/// A keymap is a list (keymap . ALIST),
/// or a symbol whose function definition is itself a keymap.
/// ALIST elements look like (CHAR . DEFN) or (SYMBOL . DEFN);
/// a vector of densely packed bindings for small character codes
/// is also allowed as an element.
#[lisp_fn]
pub fn keymapp(object: LispObject) -> bool {
    // An object is a keymap iff `get_keymap` resolves it (no error, no autoload).
    get_keymap(object, false, false).is_not_nil()
}
/// Return the parent map of KEYMAP, or nil if it has none.
/// We assume that KEYMAP is a valid keymap.
#[no_mangle]
pub extern "C" fn keymap_parent(keymap: LispObject, autoload: bool) -> LispObject {
    let map = get_keymap(keymap, true, autoload);
    let mut current = Qnil;
    // Walk the tails of MAP; the first tail that is itself a keymap is the
    // parent.  `current` always holds the most recently inspected cdr.
    for elt in map.iter_tails(LispConsEndChecks::off, LispConsCircularChecks::off) {
        current = elt.cdr();
        if keymapp(current) {
            return current;
        }
    }
    // No embedded parent; resolve whatever the final cdr is (usually nil).
    get_keymap(current, false, autoload)
}
/// Return the parent keymap of KEYMAP.
/// If KEYMAP has no parent, return nil.
#[lisp_fn(name = "keymap-parent", c_name = "keymap_parent")]
pub fn keymap_parent_lisp(keymap: LispObject) -> LispObject {
    // Lisp-visible wrapper; `true` permits autoloading the keymap.
    keymap_parent(keymap, true)
}
/// Check whether MAP is one of MAPS parents.
/// Check whether MAP is MAPS itself or one of MAPS' ancestors.
/// Removed a dead `let map = map;` self-rebinding (MAP is never reassigned).
#[no_mangle]
pub extern "C" fn keymap_memberp(map: LispObject, maps: LispObject) -> bool {
    if map.is_nil() {
        return false;
    }
    // Climb the parent chain until we either hit MAP or run out of keymaps.
    let mut maps = maps;
    while keymapp(maps) && !map.eq(maps) {
        maps = keymap_parent(maps, false);
    }
    map.eq(maps)
}
/// Modify KEYMAP to set its parent map to PARENT.
/// Return PARENT. PARENT should be nil or another keymap.
#[lisp_fn]
pub fn set_keymap_parent(keymap: LispObject, parent: LispObject) -> LispObject {
    // Flush any reverse-map cache
    unsafe {
        where_is_cache = Qnil;
        where_is_cache_keymaps = Qt;
    }
    let mut parent = parent;
    let keymap = get_keymap(keymap, true, true);
    if parent.is_not_nil() {
        parent = get_keymap(parent, true, false);
        // Check for cycles
        if keymap_memberp(keymap, parent) {
            error!("Cyclic keymap inheritance");
        }
    }
    // Skip past the initial element 'keymap'.
    let mut prev = LispCons::from(keymap);
    let mut list;
    // Find the cons cell whose cdr is either the existing parent keymap
    // (to be replaced) or the terminating non-cons (to be extended).
    loop {
        list = prev.cdr();
        // If there is a parent keymap here, replace it.
        // If we came to the end, add the parent in PREV.
        match list.as_cons() {
            None => break,
            Some(cons) => {
                if keymapp(list) {
                    break;
                } else {
                    prev = cons;
                }
            }
        }
    }
    // Destructive splice: reject pure storage, then install the new parent.
    prev.check_impure();
    prev.set_cdr(parent);
    parent
}
/// Return the prompt-string of a keymap MAP.
/// If non-nil, the prompt is shown in the echo-area
/// when reading a key-sequence to be looked-up in this keymap.
#[lisp_fn]
pub fn keymap_prompt(map: LispObject) -> LispObject {
    let map = get_keymap(map, false, false);
    // Scan the keymap's elements: a bare string is the prompt; an embedded
    // keymap (e.g. the parent) is searched recursively.
    for item in map.iter_cars(LispConsEndChecks::off, LispConsCircularChecks::off) {
        if item.is_string() {
            return item;
        }
        if keymapp(item) {
            let prompt = keymap_prompt(item);
            if prompt.is_not_nil() {
                return prompt;
            }
        }
    }
    Qnil
}
/// Same as `map_keymap_internal`, but traverses parent keymaps as well.
/// AUTOLOAD indicates that autoloaded keymaps should be loaded.
#[no_mangle]
pub unsafe extern "C" fn map_keymap(
    map: LispObject,
    fun: map_keymap_function_t,
    args: LispObject,
    data: *mut c_void,
    autoload: bool,
) {
    let mut map = get_keymap(map, true, autoload);
    while map.is_cons() {
        if let Some((car, cdr)) = map.into() {
            if keymapp(car) {
                // An embedded keymap in the car: recurse into it, then keep
                // scanning the rest of this list.
                map_keymap(car, fun, args, data, autoload);
                map = cdr;
            } else {
                // Map over this keymap's own bindings; the return value is
                // the parent keymap (or a non-cons terminator) to continue with.
                map = map_keymap_internal(map, fun, args, data);
            }
        }
        if !map.is_cons() {
            // The parent may be a symbol naming a keymap; resolve it quietly
            // (no error if it is not one, which ends the loop).
            map = get_keymap(map, false, autoload);
        }
    }
}
/// Call FUNCTION once for each event binding in KEYMAP.
/// FUNCTION is called with two arguments: the event that is bound, and
/// the definition it is bound to. The event may be a character range.
///
/// If KEYMAP has a parent, the parent's bindings are included as well.
/// This works recursively: if the parent has itself a parent, then the
/// grandparent's bindings are also included and so on.
/// usage: (map-keymap FUNCTION KEYMAP)
#[lisp_fn(name = "map-keymap", c_name = "map_keymap", min = "2")]
pub fn map_keymap_lisp(function: LispObject, keymap: LispObject, sort_first: bool) -> LispObject {
    // Sorted traversal is delegated to the Lisp helper `map-keymap-sorted';
    // otherwise walk the keymap (and its parents) directly.
    if sort_first {
        call!(intern("map-keymap-sorted").into(), function, keymap)
    } else {
        unsafe {
            map_keymap(
                keymap,
                Some(map_keymap_call),
                function,
                ptr::null_mut(),
                true,
            )
        };
        Qnil
    }
}
/// Call FUN for every binding in MAP and stop at (and return) the parent.
/// FUN is called with 4 arguments: FUN (KEY, BINDING, ARGS, DATA).
/// Call FUN for every binding in MAP and stop at (and return) the parent.
/// FUN is called with 4 arguments: FUN (KEY, BINDING, ARGS, DATA).
/// Removed a dead `let map = map;` self-rebinding (MAP is never reassigned),
/// consistent with the same cleanup in `get_keymap` / `keymap_memberp`.
#[no_mangle]
pub unsafe extern "C" fn map_keymap_internal(
    map: LispObject,
    fun: map_keymap_function_t,
    args: LispObject,
    data: *mut c_void,
) -> LispObject {
    // Skip a leading `keymap' symbol if present; otherwise start at MAP itself.
    let tail = match map.into() {
        None => Qnil,
        Some((car, cdr)) => {
            if car.eq(Qkeymap) {
                cdr
            } else {
                map
            }
        }
    };
    let mut parent = tail;
    for tail_cons in tail.iter_tails(LispConsEndChecks::off, LispConsCircularChecks::off) {
        let binding = tail_cons.car();
        if binding.eq(Qkeymap) {
            break;
        } else {
            // An embedded parent.
            if keymapp(binding) {
                break;
            }
            if let Some((car, cdr)) = binding.into() {
                // (EVENT . DEFINITION) pair.
                map_keymap_item(fun, args, car, cdr, data);
            } else if binding.is_vector() {
                // Dense vector of bindings indexed by character code.
                if let Some(binding_vec) = binding.as_vectorlike() {
                    for c in 0..binding_vec.pseudovector_size() {
                        map_keymap_item(fun, args, c.into(), aref(binding, c), data);
                    }
                }
            } else if binding.is_char_table() {
                // Char-table of bindings: pack FUN/DATA/ARGS into a save
                // object so the C callback can unpack them per entry.
                let saved = match fun {
                    Some(f) => make_save_funcptr_ptr_obj(Some(std::mem::transmute(f)), data, args),
                    None => make_save_funcptr_ptr_obj(None, data, args),
                };
                map_char_table(Some(map_keymap_char_table_item), Qnil, binding, saved);
            }
        }
        parent = tail_cons.cdr();
    }
    parent
}
/// Call FUNCTION once for each event binding in KEYMAP.
/// FUNCTION is called with two arguments: the event that is bound, and
/// the definition it is bound to. The event may be a character range.
/// If KEYMAP has a parent, this function returns it without processing it.
#[lisp_fn(name = "map-keymap-internal", c_name = "map_keymap_internal")]
pub fn map_keymap_internal_lisp(function: LispObject, keymap: LispObject) -> LispObject {
    // Resolve (and autoload if necessary) before walking one keymap level.
    let map = get_keymap(keymap, true, true);
    unsafe { map_keymap_internal(map, Some(map_keymap_call), function, ptr::null_mut()) }
}
/// Return the binding for command KEYS in current local keymap only.
/// KEYS is a string or vector, a sequence of keystrokes.
/// The binding is probably a symbol with a function definition.
/// If optional argument ACCEPT-DEFAULT is non-nil, recognize default
/// bindings; see the description of `lookup-key' for more details about this.
#[lisp_fn(min = "1")]
pub fn local_key_binding(keys: LispObject, accept_default: LispObject) -> LispObject {
    // No local map means no local binding.
    let local_map = current_local_map();
    if local_map.is_not_nil() {
        lookup_key(local_map, keys, accept_default)
    } else {
        Qnil
    }
}
/// Return current buffer's local keymap, or nil if it has none.
/// Normally the local keymap is set by the major mode with `use-local-map'.
#[lisp_fn]
pub fn current_local_map() -> LispObject {
    // The local keymap is stored directly on the buffer object.
    ThreadState::current_buffer_unchecked().keymap_
}
/// Select KEYMAP as the local keymap.
/// If KEYMAP is nil, that means no local keymap.
#[lisp_fn]
pub fn use_local_map(keymap: LispObject) {
    // nil clears the local map; anything else must resolve to a keymap
    // (signalling an error otherwise, autoloading if necessary).
    let resolved = if keymap.is_nil() {
        keymap
    } else {
        get_keymap(keymap, true, true)
    };
    ThreadState::current_buffer_unchecked().keymap_ = resolved;
}
/// Return the binding for command KEYS in current global keymap only.
/// KEYS is a string or vector, a sequence of keystrokes.
/// The binding is probably a symbol with a function definition.
/// This function's return values are the same as those of `lookup-key'
/// (which see).
///
/// If optional argument ACCEPT-DEFAULT is non-nil, recognize default
/// bindings; see the description of `lookup-key' for more details about this.
#[lisp_fn(min = "1")]
pub fn global_key_binding(keys: LispObject, accept_default: LispObject) -> LispObject {
    // Mirror of `local-key-binding', but against the global map.
    let global_map = current_global_map();
    if global_map.is_not_nil() {
        lookup_key(global_map, keys, accept_default)
    } else {
        Qnil
    }
}
/// Return the current global keymap.
#[lisp_fn]
pub fn current_global_map() -> LispObject {
    // Mirrors the C global `current_global_map`; mutable-static read is unsafe.
    unsafe { _current_global_map }
}
/// Select KEYMAP as the global keymap.
#[lisp_fn]
pub fn use_global_map(keymap: LispObject) {
    // Resolve (and autoload if needed) before installing; errors if KEYMAP
    // is not a keymap.
    unsafe { _current_global_map = get_keymap(keymap, true, true) };
}
// Value is number if KEY is too long; nil if valid but has no definition.
// GC is possible in this function.
/// In keymap KEYMAP, look up key sequence KEY. Return the definition.
/// A value of nil means undefined. See doc of `define-key'
/// for kinds of definitions.
///
/// A number as value means KEY is "too long";
/// that is, characters or symbols in it except for the last one
/// fail to be a valid sequence of prefix characters in KEYMAP.
/// The number is how many characters at the front of KEY
/// it takes to reach a non-prefix key.
///
/// Normally, `lookup-key' ignores bindings for t, which act as default
/// bindings, used when nothing else in the keymap applies; this makes it
/// usable as a general function for probing keymaps. However, if the
/// third optional argument ACCEPT-DEFAULT is non-nil, `lookup-key' will
/// recognize the default bindings, just as `read-key-sequence' does.
#[lisp_fn(min = "2")]
pub fn lookup_key(keymap: LispObject, key: LispObject, accept_default: LispObject) -> LispObject {
    let ok = accept_default.is_not_nil();
    let mut keymap = get_keymap(keymap, true, true);
    let length = key.as_vector_or_string_length() as EmacsInt;
    // An empty key sequence trivially "reaches" the keymap itself.
    if length == 0 {
        return keymap;
    }
    let mut idx = 0;
    // Consume KEY one event at a time, descending into sub-keymaps.
    loop {
        let mut c = aref(key, idx);
        idx += 1;
        // Convert old-style Lucid event lists to modern event objects.
        if c.is_cons() && lucid_event_type_list_p(c.into()) {
            c = unsafe { Fevent_convert_list(c) };
        }
        // Turn the 8th bit of string chars into a meta modifier.
        if let Some(k) = key.as_string() {
            if let Some(x) = c.as_fixnum() {
                let x = x as u32;
                if x & 0x80 != 0 && !k.is_multibyte() {
                    c = ((x | char_bits::CHAR_META) & !0x80).into();
                }
            }
        }
        // Allow string since binding for `menu-bar-select-buffer'
        // includes the buffer name in the key sequence.
        if !(c.is_fixnum() || c.is_symbol() || c.is_cons() || c.is_string()) {
            message_with_string!("Key sequence contains invalid event %s", c, true);
        }
        let cmd = unsafe { access_keymap(keymap, c, ok, false, true) };
        // Last event: whatever we found (possibly nil) is the answer.
        if idx == length {
            return cmd;
        }
        // More events remain, so CMD must itself be a keymap; if it is not,
        // KEY is "too long" and we return how many events were consumed.
        keymap = get_keymap(cmd, false, true);
        if !keymap.is_cons() {
            return idx.into();
        }
        // Allow C-g to interrupt long lookups (may autoload, hence may GC).
        unsafe {
            maybe_quit();
        };
    }
}
/// Define COMMAND as a prefix command. COMMAND should be a symbol.
/// A new sparse keymap is stored as COMMAND's function definition and its
/// value.
/// This prepares COMMAND for use as a prefix key's binding.
/// If a second optional argument MAPVAR is given, it should be a symbol.
/// The map is then stored as MAPVAR's value instead of as COMMAND's
/// value; but COMMAND is still defined as a function.
/// The third optional argument NAME, if given, supplies a menu name
/// string for the map. This is required to use the keymap as a menu.
/// This function returns COMMAND.
#[lisp_fn(min = "1")]
pub fn define_prefix_command(
    command: LispSymbolRef,
    mapvar: LispObject,
    name: LispObject,
) -> LispSymbolRef {
    // Fresh sparse keymap becomes COMMAND's function definition...
    let map = make_sparse_keymap(name);
    fset(command, map);
    // ...and its value, unless MAPVAR was supplied to hold the map instead.
    let value_holder: LispSymbolRef = if mapvar.is_not_nil() {
        mapvar.into()
    } else {
        command
    };
    set(value_holder, map);
    command
}
/// Construct and return a new sparse keymap.
/// Its car is `keymap' and its cdr is an alist of (CHAR . DEFINITION),
/// which binds the character CHAR to DEFINITION, or (SYMBOL . DEFINITION),
/// which binds the function key or mouse event SYMBOL to DEFINITION.
/// Initially the alist is nil.
///
/// The optional arg STRING supplies a menu name for the keymap
/// in case you use it as a menu with `x-popup-menu'.
#[lisp_fn(min = "0")]
pub fn make_sparse_keymap(string: LispObject) -> LispObject {
    // Without a menu name the keymap is just (keymap).
    if string.is_nil() {
        return list!(Qkeymap);
    }
    // When dumping (purify flag set), the name is moved to pure storage.
    let name = if unsafe { globals.Vpurify_flag }.is_not_nil() {
        unsafe { Fpurecopy(string) }
    } else {
        string
    };
    list!(Qkeymap, name)
}
#[no_mangle]
pub extern "C" fn describe_vector_princ(elt: LispObject, fun: LispObject) {
    // Indent to column 16, print ELT via FUN, then terminate the line.
    indent_to(16, 1.into());
    call!(fun, elt);
    unsafe { Fterpri(Qnil, Qnil) };
}
/// Insert a description of contents of VECTOR.
/// This is text showing the elements of vector matched against indices.
/// DESCRIBER is the output function used; nil means use `princ'.
#[lisp_fn(min = "1", name = "describe-vector", c_name = "describe_vector")]
pub fn describe_vector_lisp(vector: LispObject, mut describer: LispObject) {
    // Default output function is `princ'.
    if describer.is_nil() {
        describer = intern("princ").into();
    }
    // BUGFIX: capture the specpdl depth BEFORE `specbind`, so the final
    // `unbind_to` actually removes the `standard-output` binding again.
    // Previously the index was taken after the bind, leaking it.
    let count = c_specpdl_index();
    // Redirect `standard-output` to the current buffer for the duration.
    unsafe { specbind(Qstandard_output, current_buffer()) };
    if !(vector.is_vector() || vector.is_char_table()) {
        wrong_type!(Qvector_or_char_table_p, vector);
    }
    unsafe {
        describe_vector(
            vector,
            Qnil,
            describer,
            Some(describe_vector_princ),
            false,
            Qnil,
            Qnil,
            false,
            false,
        )
    };
    // Pop the `standard-output` binding installed above.
    unbind_to(count, Qnil);
}
#[no_mangle]
pub extern "C" fn copy_keymap_1(chartable: LispObject, idx: LispObject, elt: LispObject) {
    // Char-table mapping callback used by `copy_keymap': replace each entry
    // with a (possibly recursively copied) keymap item.
    unsafe { Fset_char_table_range(chartable, idx, copy_keymap_item(elt)) };
}
/// Return a copy of the keymap KEYMAP.
///
/// Note that this is almost never needed. If you want a keymap that's like
/// another yet with a few changes, you should use map inheritance rather
/// than copying. I.e. something like:
///
/// (let ((map (make-sparse-keymap)))
/// (set-keymap-parent map <theirmap>)
/// (define-key map ...)
/// ...)
///
/// After performing `copy-keymap', the copy starts out with the same definitions
/// of KEYMAP, but changing either the copy or KEYMAP does not affect the other.
/// Any key definitions that are subkeymaps are recursively copied.
/// However, a key definition which is a symbol whose definition is a keymap
/// is not copied.
#[lisp_fn]
pub fn copy_keymap(keymap: LispObject) -> LispObject {
    let keymap = get_keymap(keymap, true, false);
    // Build the copy incrementally: `copy` keeps the head, `tail` tracks
    // the last cons appended so far.
    let mut tail = list!(Qkeymap);
    let copy = tail;
    let (_, mut keymap) = keymap.into(); // Skip the `keymap' symbol.
    while let Some((mut elt, kmd)) = keymap.into() {
        // A bare `keymap' symbol marks the start of the parent; stop copying.
        if elt.eq(Qkeymap) {
            break;
        }
        if elt.is_char_table() {
            // Shallow-copy the table, then deep-copy each entry in place.
            elt = unsafe { Fcopy_sequence(elt) };
            unsafe { map_char_table(Some(copy_keymap_1), Qnil, elt, elt) };
        } else if let Some(v) = elt.as_vector() {
            // Dense binding vector: copy, then deep-copy each slot.
            elt = unsafe { Fcopy_sequence(elt) };
            let mut v2 = elt.as_vector().unwrap();
            for (i, obj) in v.iter().enumerate() {
                v2.set(i, unsafe { copy_keymap_item(obj) });
            }
        } else if let Some((front, back)) = elt.into() {
            if front.eq(Qkeymap) {
                // This is a sub keymap
                elt = copy_keymap(elt);
            } else {
                // (EVENT . DEFINITION): copy the definition only.
                elt = (front, unsafe { copy_keymap_item(back) }).into();
            }
        }
        // Append the copied element and advance both lists.
        setcdr(tail.into(), list!(elt));
        tail = LispCons::from(tail).cdr();
        keymap = kmd;
    }
    // Share the remaining tail (the parent chain) with the original.
    setcdr(tail.into(), keymap);
    copy
}
// GC is possible in this funtion if it autoloads a keymap.
/// Return the binding for command KEY in current keymaps.
/// KEY is a string or vector, a sequence of keystrokes.
/// The binding is probably a symbol with a function definition.
///
/// Normally, `key-binding' ignores bindings for t, which act as default
/// bindings, used when nothing else in the keymap applies; this makes it
/// usable as a general function for probing keymaps. However, if the
/// optional second argument ACCEPT-DEFAULT is non-nil, `key-binding' does
/// recognize the default bindings, just as `read-key-sequence' does.
///
/// Like the normal command loop, `key-binding' will remap the command
/// resulting from looking up KEY by looking up the command in the
/// current keymaps. However, if the optional third argument NO-REMAP
/// is non-nil, `key-binding' returns the unmapped command.
///
/// If KEY is a key sequence initiated with the mouse, the used keymaps
/// will depend on the clicked mouse position with regard to the buffer
/// and possible local keymaps on strings.
///
/// If the optional argument POSITION is non-nil, it specifies a mouse
/// position as returned by `event-start' and `event-end', and the lookup
/// occurs in the keymaps associated with it instead of KEY. It can also
/// be a number or marker, in which case the keymap properties at the
/// specified buffer position instead of point are used.
#[lisp_fn(min = "1")]
pub fn | (
key: LispObject,
accept_default: bool,
no_remap: bool,
mut position: LispObject,
) -> LispObject {
if key.is_vector() && position.is_nil() {
let key = key.force_vector();
if key.len() == 0 {
return Qnil;
}
// Mouse events may have a symbolic prefix indicating the scrollbar or modeline
let idx = if key.get(0).is_symbol() && key.len() > 1 {
1
} else {
0
};
let event: keyboard::Event = key.get(idx).into();
// Ignore locations without event data
if event.has_data() {
let kind = event.head_kind();
if kind == Qmouse_click {
position = event.start()
}
}
}
let value = lookup_key(
(Qkeymap, unsafe { Fcurrent_active_maps(Qt, position) }).into(),
key,
accept_default.into(),
);
if value.is_nil() || value.is_integer() {
return Qnil;
}
// If the result of the ordinary keymap lookup is an interactive
// command, look for a key binding (i.e. remapping) for that command.
if !no_remap && value.is_symbol() {
let remap = unsafe { Fcommand_remapping(value, position, Qnil) };
if remap.is_not_nil() {
return remap;
}
}
value
}
include!(concat!(env!("OUT_DIR"), "/keymap_exports.rs"));
| key_binding | identifier_name |
mod.rs | use std::cmp::Ordering;
| ///
/// # Worst-case complexity
/// Constant time and additional memory.
///
/// # Examples
/// ```
/// use malachite_base::orderings::ordering_from_str;
/// use std::cmp::Ordering;
///
/// assert_eq!(ordering_from_str("Equal"), Some(Ordering::Equal));
/// assert_eq!(ordering_from_str("Less"), Some(Ordering::Less));
/// assert_eq!(ordering_from_str("Greater"), Some(Ordering::Greater));
/// assert_eq!(ordering_from_str("abc"), None);
/// ```
#[inline]
pub fn ordering_from_str(src: &str) -> Option<Ordering> {
    // Accept exactly the three Debug spellings of `Ordering`; anything else
    // (including different capitalization) yields None.
    if src == "Equal" {
        Some(Ordering::Equal)
    } else if src == "Less" {
        Some(Ordering::Less)
    } else if src == "Greater" {
        Some(Ordering::Greater)
    } else {
        None
    }
}
/// This module contains iterators that generate `Ordering`s without repetition.
pub mod exhaustive;
/// This module contains iterators that generate `Ordering`s randomly.
pub mod random; | pub(crate) const ORDERINGS: [Ordering; 3] = [Ordering::Equal, Ordering::Less, Ordering::Greater];
/// Converts a `&str` to a `Ordering`.
///
/// If the `&str` does not represent a valid `Ordering`, `None` is returned. | random_line_split |
mod.rs | use std::cmp::Ordering;
pub(crate) const ORDERINGS: [Ordering; 3] = [Ordering::Equal, Ordering::Less, Ordering::Greater];
/// Converts a `&str` to a `Ordering`.
///
/// If the `&str` does not represent a valid `Ordering`, `None` is returned.
///
/// # Worst-case complexity
/// Constant time and additional memory.
///
/// # Examples
/// ```
/// use malachite_base::orderings::ordering_from_str;
/// use std::cmp::Ordering;
///
/// assert_eq!(ordering_from_str("Equal"), Some(Ordering::Equal));
/// assert_eq!(ordering_from_str("Less"), Some(Ordering::Less));
/// assert_eq!(ordering_from_str("Greater"), Some(Ordering::Greater));
/// assert_eq!(ordering_from_str("abc"), None);
/// ```
#[inline]
pub fn | (src: &str) -> Option<Ordering> {
match src {
"Equal" => Some(Ordering::Equal),
"Less" => Some(Ordering::Less),
"Greater" => Some(Ordering::Greater),
_ => None,
}
}
/// This module contains iterators that generate `Ordering`s without repetition.
pub mod exhaustive;
/// This module contains iterators that generate `Ordering`s randomly.
pub mod random;
| ordering_from_str | identifier_name |
mod.rs | use std::cmp::Ordering;
pub(crate) const ORDERINGS: [Ordering; 3] = [Ordering::Equal, Ordering::Less, Ordering::Greater];
/// Converts a `&str` to a `Ordering`.
///
/// If the `&str` does not represent a valid `Ordering`, `None` is returned.
///
/// # Worst-case complexity
/// Constant time and additional memory.
///
/// # Examples
/// ```
/// use malachite_base::orderings::ordering_from_str;
/// use std::cmp::Ordering;
///
/// assert_eq!(ordering_from_str("Equal"), Some(Ordering::Equal));
/// assert_eq!(ordering_from_str("Less"), Some(Ordering::Less));
/// assert_eq!(ordering_from_str("Greater"), Some(Ordering::Greater));
/// assert_eq!(ordering_from_str("abc"), None);
/// ```
#[inline]
pub fn ordering_from_str(src: &str) -> Option<Ordering> |
/// This module contains iterators that generate `Ordering`s without repetition.
pub mod exhaustive;
/// This module contains iterators that generate `Ordering`s randomly.
pub mod random;
| {
match src {
"Equal" => Some(Ordering::Equal),
"Less" => Some(Ordering::Less),
"Greater" => Some(Ordering::Greater),
_ => None,
}
} | identifier_body |
suite.py | #!/usr/bin/python
# #
# Copyright 2012-2019 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# https://github.com/easybuilders/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
# #
"""
This script is a collection of all the testcases.
Usage: "python -m test.framework.suite" or "python test/framework/suite.py"
@author: Toon Willems (Ghent University)
@author: Kenneth Hoste (Ghent University)
"""
import glob
import os
import sys
import tempfile
import unittest
# initialize EasyBuild logging, so we disable it
from easybuild.base import fancylogger
from easybuild.tools.build_log import EasyBuildError
from easybuild.tools.options import set_tmpdir
import test.framework.asyncprocess as a
import test.framework.build_log as bl
import test.framework.config as c
import test.framework.containers as ct
import test.framework.easyblock as b
import test.framework.easyconfig as e
import test.framework.easyconfigparser as ep
import test.framework.easyconfigformat as ef
import test.framework.ebconfigobj as ebco
import test.framework.easyconfigversion as ev
import test.framework.environment as env
import test.framework.docs as d
import test.framework.filetools as f
import test.framework.format_convert as f_c
import test.framework.general as gen
import test.framework.github as g
import test.framework.hooks as h
import test.framework.include as i
import test.framework.lib as lib
import test.framework.license as lic
import test.framework.module_generator as mg
import test.framework.modules as m
import test.framework.modulestool as mt
import test.framework.options as o
import test.framework.parallelbuild as p
import test.framework.package as pkg
import test.framework.repository as r
import test.framework.robot as robot
import test.framework.run as run
import test.framework.style as st
import test.framework.systemtools as s
import test.framework.toolchain as tc
import test.framework.toolchainvariables as tcv
import test.framework.toy_build as t
import test.framework.type_checking as et
import test.framework.tweak as tw
import test.framework.variables as v
import test.framework.yeb as y
# set plain text key ring to be used,
# so a GitHub token stored in it can be obtained without having to provide a password
try:
# with recent versions of keyring, PlaintextKeyring comes from keyrings.alt
import keyring
from keyrings.alt.file import PlaintextKeyring
keyring.set_keyring(PlaintextKeyring())
except ImportError:
try:
# with old versions of keyring, PlaintextKeyring comes from keyring.backends
import keyring
from keyring.backends.file import PlaintextKeyring
keyring.set_keyring(PlaintextKeyring())
except ImportError:
pass
# disable all logging to significantly speed up tests
fancylogger.disableDefaultHandlers()
fancylogger.setLogLevelError()
# make sure temporary files can be created/used
try:
set_tmpdir(raise_error=True)
except EasyBuildError as err:
sys.stderr.write("No execution rights on temporary files, specify another location via $TMPDIR: %s\n" % err)
sys.exit(1)
# initialize logger for all the unit tests
fd, log_fn = tempfile.mkstemp(prefix='easybuild-tests-', suffix='.log')
os.close(fd)
os.remove(log_fn)
fancylogger.logToFile(log_fn)
log = fancylogger.getLogger()
# call suite() for each module and then run them all
# note: make sure the options unit tests run first, to avoid running some of them with a readily initialized config
tests = [gen, bl, o, r, ef, ev, ebco, ep, e, mg, m, mt, f, run, a, robot, b, v, g, tcv, tc, t, c, s, lic, f_c,
tw, p, i, pkg, d, env, et, y, st, h, ct, lib]
SUITE = unittest.TestSuite([x.suite() for x in tests])
res = unittest.TextTestRunner().run(SUITE)
fancylogger.logToFile(log_fn, enable=False)
if not res.wasSuccessful():
sys.stderr.write("ERROR: Not all tests were successful.\n")
print("Log available at %s" % log_fn)
sys.exit(2)
else:
| for fn in glob.glob('%s*' % log_fn):
os.remove(fn) | conditional_block | |
suite.py | #!/usr/bin/python
# #
# Copyright 2012-2019 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# https://github.com/easybuilders/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
# #
"""
This script is a collection of all the testcases.
Usage: "python -m test.framework.suite" or "python test/framework/suite.py"
@author: Toon Willems (Ghent University)
@author: Kenneth Hoste (Ghent University)
"""
import glob
import os
import sys
import tempfile
import unittest
# initialize EasyBuild logging, so we disable it
from easybuild.base import fancylogger
from easybuild.tools.build_log import EasyBuildError
from easybuild.tools.options import set_tmpdir
import test.framework.asyncprocess as a
import test.framework.build_log as bl
import test.framework.config as c
import test.framework.containers as ct
import test.framework.easyblock as b
import test.framework.easyconfig as e
import test.framework.easyconfigparser as ep
import test.framework.easyconfigformat as ef
import test.framework.ebconfigobj as ebco
import test.framework.easyconfigversion as ev
import test.framework.environment as env
import test.framework.docs as d
import test.framework.filetools as f
import test.framework.format_convert as f_c
import test.framework.general as gen
import test.framework.github as g
import test.framework.hooks as h
import test.framework.include as i
import test.framework.lib as lib
import test.framework.license as lic
import test.framework.module_generator as mg
import test.framework.modules as m
import test.framework.modulestool as mt
import test.framework.options as o
import test.framework.parallelbuild as p
import test.framework.package as pkg
import test.framework.repository as r
import test.framework.robot as robot
import test.framework.run as run
import test.framework.style as st
import test.framework.systemtools as s
import test.framework.toolchain as tc
import test.framework.toolchainvariables as tcv
import test.framework.toy_build as t
import test.framework.type_checking as et
import test.framework.tweak as tw
import test.framework.variables as v
import test.framework.yeb as y
# set plain text key ring to be used,
# so a GitHub token stored in it can be obtained without having to provide a password
try:
# with recent versions of keyring, PlaintextKeyring comes from keyrings.alt
import keyring
from keyrings.alt.file import PlaintextKeyring
keyring.set_keyring(PlaintextKeyring())
except ImportError:
try:
# with old versions of keyring, PlaintextKeyring comes from keyring.backends
import keyring
from keyring.backends.file import PlaintextKeyring
keyring.set_keyring(PlaintextKeyring())
except ImportError:
pass
# disable all logging to significantly speed up tests
fancylogger.disableDefaultHandlers()
fancylogger.setLogLevelError()
# make sure temporary files can be created/used
try:
set_tmpdir(raise_error=True)
except EasyBuildError as err:
sys.stderr.write("No execution rights on temporary files, specify another location via $TMPDIR: %s\n" % err)
sys.exit(1)
# initialize logger for all the unit tests
fd, log_fn = tempfile.mkstemp(prefix='easybuild-tests-', suffix='.log')
os.close(fd)
os.remove(log_fn)
fancylogger.logToFile(log_fn)
log = fancylogger.getLogger()
# call suite() for each module and then run them all
# note: make sure the options unit tests run first, to avoid running some of them with a readily initialized config
tests = [gen, bl, o, r, ef, ev, ebco, ep, e, mg, m, mt, f, run, a, robot, b, v, g, tcv, tc, t, c, s, lic, f_c,
tw, p, i, pkg, d, env, et, y, st, h, ct, lib]
SUITE = unittest.TestSuite([x.suite() for x in tests])
res = unittest.TextTestRunner().run(SUITE)
fancylogger.logToFile(log_fn, enable=False)
if not res.wasSuccessful():
sys.stderr.write("ERROR: Not all tests were successful.\n")
print("Log available at %s" % log_fn) | sys.exit(2)
else:
for fn in glob.glob('%s*' % log_fn):
os.remove(fn) | random_line_split | |
crm_lead.py | # -*- encoding: utf-8 -*-
###############################################################################
# #
# Copyright (C) 2016 Trustcode - www.trustcode.com.br #
# Danimar Ribeiro <danimaribeiro@gmail.com> #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU Affero General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
# #
###############################################################################
from openerp import api, models
class CrmLead(models.Model):
_inherit = 'crm.lead'
@api.multi
def handle_partner_assignation(self, action='create',
partner_id=False, context=None):
partner_ids = super(CrmLead, self).handle_partner_assignation(
action=action, partner_id=partner_id, context=context)
for lead in self:
partner_id = partner_ids[lead.id]
partner = self.env['res.partner'].browse(partner_id)
if partner.parent_id:
|
return partner_ids
| partner_ids[lead.id] = partner.parent_id.id
lead.partner_id = partner.parent_id.id | conditional_block |
crm_lead.py | # -*- encoding: utf-8 -*-
###############################################################################
# #
# Copyright (C) 2016 Trustcode - www.trustcode.com.br #
# Danimar Ribeiro <danimaribeiro@gmail.com> #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU Affero General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
# #
###############################################################################
from openerp import api, models
class CrmLead(models.Model):
_inherit = 'crm.lead'
@api.multi
def handle_partner_assignation(self, action='create',
partner_id=False, context=None):
| partner_ids = super(CrmLead, self).handle_partner_assignation(
action=action, partner_id=partner_id, context=context)
for lead in self:
partner_id = partner_ids[lead.id]
partner = self.env['res.partner'].browse(partner_id)
if partner.parent_id:
partner_ids[lead.id] = partner.parent_id.id
lead.partner_id = partner.parent_id.id
return partner_ids | identifier_body | |
crm_lead.py | # Copyright (C) 2016 Trustcode - www.trustcode.com.br #
# Danimar Ribeiro <danimaribeiro@gmail.com> #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU Affero General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
# #
###############################################################################
from openerp import api, models
class CrmLead(models.Model):
_inherit = 'crm.lead'
@api.multi
def handle_partner_assignation(self, action='create',
partner_id=False, context=None):
partner_ids = super(CrmLead, self).handle_partner_assignation(
action=action, partner_id=partner_id, context=context)
for lead in self:
partner_id = partner_ids[lead.id]
partner = self.env['res.partner'].browse(partner_id)
if partner.parent_id:
partner_ids[lead.id] = partner.parent_id.id
lead.partner_id = partner.parent_id.id
return partner_ids | # -*- encoding: utf-8 -*-
###############################################################################
# # | random_line_split | |
crm_lead.py | # -*- encoding: utf-8 -*-
###############################################################################
# #
# Copyright (C) 2016 Trustcode - www.trustcode.com.br #
# Danimar Ribeiro <danimaribeiro@gmail.com> #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU Affero General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
# #
###############################################################################
from openerp import api, models
class CrmLead(models.Model):
_inherit = 'crm.lead'
@api.multi
def | (self, action='create',
partner_id=False, context=None):
partner_ids = super(CrmLead, self).handle_partner_assignation(
action=action, partner_id=partner_id, context=context)
for lead in self:
partner_id = partner_ids[lead.id]
partner = self.env['res.partner'].browse(partner_id)
if partner.parent_id:
partner_ids[lead.id] = partner.parent_id.id
lead.partner_id = partner.parent_id.id
return partner_ids
| handle_partner_assignation | identifier_name |
run.py | import sys
import os
import numpy as np
sys.path.append(os.path.join(os.getcwd(), ".."))
from run_utils import run_kmc, parse_input
from ParameterJuggler import ParameterSet
def main():
controller, path, app, cfg, n_procs = parse_input(sys.argv)
| heights = ParameterSet(cfg, "confiningSurfaceHeight\s*=\s*(.*)\;")
heights.initialize_set([20.])
diffusions = ParameterSet(cfg, "diffuse\s*=\s*(.*)\;")
diffusions.initialize_set([3])
controller.register_parameter_set(alpha_values)
controller.register_parameter_set(heights)
controller.register_parameter_set(diffusions)
controller.set_repeats(20)
controller.run(run_kmc, path, app, cfg, ask=False, n_procs=n_procs, shuffle=True)
if __name__ == "__main__":
main() | alpha_values = ParameterSet(cfg, "alpha\s*=\s*(.*)\;")
alpha_values.initialize_set(np.linspace(0.5, 2, 16))
| random_line_split |
run.py | import sys
import os
import numpy as np
sys.path.append(os.path.join(os.getcwd(), ".."))
from run_utils import run_kmc, parse_input
from ParameterJuggler import ParameterSet
def main():
|
if __name__ == "__main__":
main()
| controller, path, app, cfg, n_procs = parse_input(sys.argv)
alpha_values = ParameterSet(cfg, "alpha\s*=\s*(.*)\;")
alpha_values.initialize_set(np.linspace(0.5, 2, 16))
heights = ParameterSet(cfg, "confiningSurfaceHeight\s*=\s*(.*)\;")
heights.initialize_set([20.])
diffusions = ParameterSet(cfg, "diffuse\s*=\s*(.*)\;")
diffusions.initialize_set([3])
controller.register_parameter_set(alpha_values)
controller.register_parameter_set(heights)
controller.register_parameter_set(diffusions)
controller.set_repeats(20)
controller.run(run_kmc, path, app, cfg, ask=False, n_procs=n_procs, shuffle=True) | identifier_body |
run.py | import sys
import os
import numpy as np
sys.path.append(os.path.join(os.getcwd(), ".."))
from run_utils import run_kmc, parse_input
from ParameterJuggler import ParameterSet
def main():
controller, path, app, cfg, n_procs = parse_input(sys.argv)
alpha_values = ParameterSet(cfg, "alpha\s*=\s*(.*)\;")
alpha_values.initialize_set(np.linspace(0.5, 2, 16))
heights = ParameterSet(cfg, "confiningSurfaceHeight\s*=\s*(.*)\;")
heights.initialize_set([20.])
diffusions = ParameterSet(cfg, "diffuse\s*=\s*(.*)\;")
diffusions.initialize_set([3])
controller.register_parameter_set(alpha_values)
controller.register_parameter_set(heights)
controller.register_parameter_set(diffusions)
controller.set_repeats(20)
controller.run(run_kmc, path, app, cfg, ask=False, n_procs=n_procs, shuffle=True)
if __name__ == "__main__":
| main() | conditional_block | |
run.py | import sys
import os
import numpy as np
sys.path.append(os.path.join(os.getcwd(), ".."))
from run_utils import run_kmc, parse_input
from ParameterJuggler import ParameterSet
def | ():
controller, path, app, cfg, n_procs = parse_input(sys.argv)
alpha_values = ParameterSet(cfg, "alpha\s*=\s*(.*)\;")
alpha_values.initialize_set(np.linspace(0.5, 2, 16))
heights = ParameterSet(cfg, "confiningSurfaceHeight\s*=\s*(.*)\;")
heights.initialize_set([20.])
diffusions = ParameterSet(cfg, "diffuse\s*=\s*(.*)\;")
diffusions.initialize_set([3])
controller.register_parameter_set(alpha_values)
controller.register_parameter_set(heights)
controller.register_parameter_set(diffusions)
controller.set_repeats(20)
controller.run(run_kmc, path, app, cfg, ask=False, n_procs=n_procs, shuffle=True)
if __name__ == "__main__":
main()
| main | identifier_name |
error.rs | extern crate hyper;
extern crate serde_json as json;
extern crate serde_qs as qs;
use params::to_snakecase;
use std::error;
use std::fmt;
use std::io;
use std::num::ParseIntError;
/// An error encountered when communicating with the Stripe API.
#[derive(Debug)]
pub enum Error {
/// An error reported by Stripe.
Stripe(RequestError),
/// A networking error communicating with the Stripe server.
Http(hyper::Error),
/// An error reading the response body.
Io(io::Error),
/// An error converting between wire format and Rust types.
Conversion(Box<error::Error + Send>),
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str(error::Error::description(self))?;
match *self {
Error::Stripe(ref err) => write!(f, ": {}", err),
Error::Http(ref err) => write!(f, ": {}", err),
Error::Io(ref err) => write!(f, ": {}", err),
Error::Conversion(ref err) => write!(f, ": {}", err),
}
}
}
impl error::Error for Error {
fn description(&self) -> &str {
match *self {
Error::Stripe(_) => "error reported by stripe",
Error::Http(_) => "error communicating with stripe",
Error::Io(_) => "error reading response from stripe",
Error::Conversion(_) => "error converting between wire format and Rust types",
}
}
fn cause(&self) -> Option<&error::Error> {
match *self {
Error::Stripe(ref err) => Some(err),
Error::Http(ref err) => Some(err),
Error::Io(ref err) => Some(err),
Error::Conversion(ref err) => Some(&**err),
}
}
}
impl From<RequestError> for Error {
fn from(err: RequestError) -> Error {
Error::Stripe(err)
}
}
impl From<hyper::Error> for Error {
fn from(err: hyper::Error) -> Error {
Error::Http(err)
}
}
impl From<io::Error> for Error {
fn from(err: io::Error) -> Error {
Error::Io(err)
}
}
impl From<qs::Error> for Error {
fn from(err: qs::Error) -> Error {
Error::Conversion(Box::new(err))
}
}
impl From<json::Error> for Error {
fn from(err: json::Error) -> Error {
Error::Conversion(Box::new(err))
}
}
/// The list of possible values for a RequestError's type.
#[derive(Debug, PartialEq, Deserialize)]
pub enum ErrorType {
#[serde(skip_deserializing)]
Unknown,
#[serde(rename = "api_error")]
Api,
#[serde(rename = "api_connection_error")]
Connection,
#[serde(rename = "authentication_error")]
Authentication,
#[serde(rename = "card_error")]
Card,
#[serde(rename = "invalid_request_error")]
InvalidRequest,
#[serde(rename = "rate_limit_error")]
RateLimit,
#[serde(rename = "validation_error")]
Validation,
}
impl Default for ErrorType {
fn default() -> Self {
ErrorType::Unknown
}
}
impl fmt::Display for ErrorType {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", to_snakecase(&format!("{:?}Error", self)))
}
}
/// The list of possible values for a RequestError's code.
#[derive(Debug, Deserialize, Serialize, Eq, PartialEq, Hash)]
#[serde(rename_all = "snake_case")]
pub enum ErrorCode {
AccountAlreadyExists,
AccountCountryInvalidAddress,
AccountInvalid,
AccountNumberInvalid,
AlipayUpgradeRequired,
AmountTooLarge,
AmountTooSmall,
ApiKeyExpired,
BalanceInsufficient,
BankAccountExists,
BankAccountUnusable,
BankAccountUnverified,
BitcoinUpgradeRequired,
CardDeclined,
ChargeAlreadyCaptured,
ChargeAlreadyRefunded,
ChargeDisputed,
ChargeExpiredForCapture,
CountryUnsupported,
CouponExpired,
CustomerMaxSubscriptions,
EmailInvalid,
ExpiredCard,
IncorrectAddress,
IncorrectCvc,
IncorrectNumber,
IncorrectZip,
InstantPayoutsUnsupported,
InvalidCardType,
InvalidChargeAmount,
InvalidCvc,
InvalidExpiryMonth,
InvalidExpiryYear,
InvalidNumber,
InvalidSourceUsage,
InvoiceNoCustomerLineItems,
InvoiceNoSubscriptionLineItems,
InvoiceNotEditable,
InvoiceUpcomingNone,
LivemodeMismatch,
Missing,
OrderCreationFailed,
OrderRequiredSettings,
OrderStatusInvalid,
OrderUpstreamTimeout,
OutOfInventory,
ParameterInvalidEmpty,
ParameterInvalidInteger,
ParameterInvalidStringBlank,
ParameterInvalidStringEmpty,
ParameterMissing,
ParameterUnknown,
PaymentMethodUnactivated,
PayoutsNotAllowed,
PlatformApiKeyExpired,
PostalCodeInvalid,
ProcessingError,
ProductInactive,
RateLimit,
ResourceAlreadyExists,
ResourceMissing,
RoutingNumberInvalid,
SecretKeyRequired,
SepaUnsupportedAccount,
ShippingCalculationFailed,
SkuInactive,
StateUnsupported,
TaxIdInvalid,
TaxesCalculationFailed,
TestmodeChargesOnly,
TlsVersionUnsupported,
TokenAlreadyUsed,
TokenInUse,
TransfersNotAllowed,
UpstreamOrderCreationFailed,
UrlInvalid,
#[doc(hidden)] __NonExhaustive,
}
impl fmt::Display for ErrorCode {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", to_snakecase(&format!("{:?}", self)))
}
}
/// An error reported by stripe in a request's response.
///
/// For more details see https://stripe.com/docs/api#errors.
#[derive(Debug, Default, Deserialize)]
pub struct RequestError {
/// The HTTP status in the response.
#[serde(skip_deserializing)]
pub http_status: u16,
/// The type of error returned.
#[serde(rename = "type")]
pub error_type: ErrorType,
/// A human-readable message providing more details about the error.
/// For card errors, these messages can be shown to end users.
#[serde(default)]
pub message: Option<String>,
/// For card errors, a value describing the kind of card error that occured.
pub code: Option<ErrorCode>,
/// For card errors resulting from a bank decline, a string indicating the
/// bank's reason for the decline if they provide one.
pub decline_code: Option<String>,
/// The ID of the failed charge, if applicable.
pub charge: Option<String>,
}
impl fmt::Display for RequestError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result |
}
impl error::Error for RequestError {
fn description(&self) -> &str {
self.message.as_ref().map(|s| s.as_str()).unwrap_or(
"request error",
)
}
}
#[doc(hidden)]
#[derive(Deserialize)]
pub struct ErrorObject {
pub error: RequestError,
}
/// An error encountered when communicating with the Stripe API webhooks.
#[derive(Debug)]
pub enum WebhookError {
BadHeader(ParseIntError),
BadSignature,
BadTimestamp(i64),
BadParse(json::Error),
}
impl fmt::Display for WebhookError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str(error::Error::description(self))?;
match *self {
WebhookError::BadHeader(ref err) => write!(f, ": {}", err),
WebhookError::BadSignature => write!(f, "Signatures do not match"),
WebhookError::BadTimestamp(ref err) => write!(f, ": {}", err),
WebhookError::BadParse(ref err) => write!(f, ": {}", err),
}
}
}
impl error::Error for WebhookError {
fn description(&self) -> &str {
match *self {
WebhookError::BadHeader(_) => "error parsing timestamp",
WebhookError::BadSignature => "error comparing signatures",
WebhookError::BadTimestamp(_) => "error comparing timestamps - over tolerance",
WebhookError::BadParse(_) => "error parsing event object",
}
}
fn cause(&self) -> Option<&error::Error> {
match *self {
WebhookError::BadHeader(ref err) => Some(err),
WebhookError::BadSignature => None,
WebhookError::BadTimestamp(_) => None,
WebhookError::BadParse(ref err) => Some(err),
}
}
}
| {
write!(f, "{}({})", self.error_type, self.http_status)?;
if let Some(ref message) = self.message {
write!(f, ": {}", message)?;
}
Ok(())
} | identifier_body |
error.rs | extern crate hyper;
extern crate serde_json as json;
extern crate serde_qs as qs;
use params::to_snakecase;
use std::error;
use std::fmt;
use std::io;
use std::num::ParseIntError;
/// An error encountered when communicating with the Stripe API.
#[derive(Debug)]
pub enum Error {
/// An error reported by Stripe.
Stripe(RequestError),
/// A networking error communicating with the Stripe server.
Http(hyper::Error),
/// An error reading the response body.
Io(io::Error),
/// An error converting between wire format and Rust types.
Conversion(Box<error::Error + Send>),
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str(error::Error::description(self))?;
match *self {
Error::Stripe(ref err) => write!(f, ": {}", err),
Error::Http(ref err) => write!(f, ": {}", err),
Error::Io(ref err) => write!(f, ": {}", err),
Error::Conversion(ref err) => write!(f, ": {}", err),
}
}
}
impl error::Error for Error {
fn description(&self) -> &str {
match *self {
Error::Stripe(_) => "error reported by stripe",
Error::Http(_) => "error communicating with stripe",
Error::Io(_) => "error reading response from stripe",
Error::Conversion(_) => "error converting between wire format and Rust types",
}
}
fn cause(&self) -> Option<&error::Error> {
match *self {
Error::Stripe(ref err) => Some(err),
Error::Http(ref err) => Some(err),
Error::Io(ref err) => Some(err),
Error::Conversion(ref err) => Some(&**err),
}
}
}
impl From<RequestError> for Error {
fn from(err: RequestError) -> Error {
Error::Stripe(err)
}
}
impl From<hyper::Error> for Error {
fn from(err: hyper::Error) -> Error {
Error::Http(err)
}
}
impl From<io::Error> for Error {
fn from(err: io::Error) -> Error {
Error::Io(err)
}
}
impl From<qs::Error> for Error {
fn from(err: qs::Error) -> Error {
Error::Conversion(Box::new(err))
}
}
impl From<json::Error> for Error {
fn from(err: json::Error) -> Error {
Error::Conversion(Box::new(err))
}
}
/// The list of possible values for a RequestError's type.
#[derive(Debug, PartialEq, Deserialize)]
pub enum ErrorType {
#[serde(skip_deserializing)]
Unknown,
#[serde(rename = "api_error")]
Api,
#[serde(rename = "api_connection_error")]
Connection,
#[serde(rename = "authentication_error")]
Authentication,
#[serde(rename = "card_error")]
Card,
#[serde(rename = "invalid_request_error")]
InvalidRequest,
#[serde(rename = "rate_limit_error")]
RateLimit,
#[serde(rename = "validation_error")]
Validation,
}
impl Default for ErrorType {
fn default() -> Self {
ErrorType::Unknown
}
}
impl fmt::Display for ErrorType {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", to_snakecase(&format!("{:?}Error", self)))
}
}
/// The list of possible values for a RequestError's code.
#[derive(Debug, Deserialize, Serialize, Eq, PartialEq, Hash)]
#[serde(rename_all = "snake_case")]
pub enum ErrorCode {
AccountAlreadyExists,
AccountCountryInvalidAddress,
AccountInvalid,
AccountNumberInvalid,
AlipayUpgradeRequired,
AmountTooLarge,
AmountTooSmall,
ApiKeyExpired,
BalanceInsufficient,
BankAccountExists,
BankAccountUnusable,
BankAccountUnverified,
BitcoinUpgradeRequired,
CardDeclined,
ChargeAlreadyCaptured,
ChargeAlreadyRefunded,
ChargeDisputed,
ChargeExpiredForCapture,
CountryUnsupported,
CouponExpired,
CustomerMaxSubscriptions,
EmailInvalid,
ExpiredCard,
IncorrectAddress,
IncorrectCvc,
IncorrectNumber,
IncorrectZip,
InstantPayoutsUnsupported,
InvalidCardType,
InvalidChargeAmount,
InvalidCvc,
InvalidExpiryMonth,
InvalidExpiryYear,
InvalidNumber,
InvalidSourceUsage,
InvoiceNoCustomerLineItems,
InvoiceNoSubscriptionLineItems,
InvoiceNotEditable,
InvoiceUpcomingNone,
LivemodeMismatch,
Missing,
OrderCreationFailed,
OrderRequiredSettings,
OrderStatusInvalid,
OrderUpstreamTimeout,
OutOfInventory,
ParameterInvalidEmpty,
ParameterInvalidInteger,
ParameterInvalidStringBlank,
ParameterInvalidStringEmpty,
ParameterMissing,
ParameterUnknown,
PaymentMethodUnactivated,
PayoutsNotAllowed,
PlatformApiKeyExpired,
PostalCodeInvalid,
ProcessingError,
ProductInactive,
RateLimit,
ResourceAlreadyExists,
ResourceMissing,
RoutingNumberInvalid,
SecretKeyRequired,
SepaUnsupportedAccount,
ShippingCalculationFailed,
SkuInactive,
StateUnsupported,
TaxIdInvalid,
TaxesCalculationFailed,
TestmodeChargesOnly,
TlsVersionUnsupported,
TokenAlreadyUsed,
TokenInUse,
TransfersNotAllowed,
UpstreamOrderCreationFailed,
UrlInvalid,
#[doc(hidden)] __NonExhaustive,
}
impl fmt::Display for ErrorCode {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", to_snakecase(&format!("{:?}", self)))
}
}
/// An error reported by stripe in a request's response.
///
/// For more details see https://stripe.com/docs/api#errors.
#[derive(Debug, Default, Deserialize)]
pub struct RequestError {
/// The HTTP status in the response. |
/// The type of error returned.
#[serde(rename = "type")]
pub error_type: ErrorType,
/// A human-readable message providing more details about the error.
/// For card errors, these messages can be shown to end users.
#[serde(default)]
pub message: Option<String>,
/// For card errors, a value describing the kind of card error that occured.
pub code: Option<ErrorCode>,
/// For card errors resulting from a bank decline, a string indicating the
/// bank's reason for the decline if they provide one.
pub decline_code: Option<String>,
/// The ID of the failed charge, if applicable.
pub charge: Option<String>,
}
impl fmt::Display for RequestError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}({})", self.error_type, self.http_status)?;
if let Some(ref message) = self.message {
write!(f, ": {}", message)?;
}
Ok(())
}
}
impl error::Error for RequestError {
fn description(&self) -> &str {
self.message.as_ref().map(|s| s.as_str()).unwrap_or(
"request error",
)
}
}
#[doc(hidden)]
#[derive(Deserialize)]
pub struct ErrorObject {
pub error: RequestError,
}
/// An error encountered when communicating with the Stripe API webhooks.
#[derive(Debug)]
pub enum WebhookError {
BadHeader(ParseIntError),
BadSignature,
BadTimestamp(i64),
BadParse(json::Error),
}
impl fmt::Display for WebhookError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str(error::Error::description(self))?;
match *self {
WebhookError::BadHeader(ref err) => write!(f, ": {}", err),
WebhookError::BadSignature => write!(f, "Signatures do not match"),
WebhookError::BadTimestamp(ref err) => write!(f, ": {}", err),
WebhookError::BadParse(ref err) => write!(f, ": {}", err),
}
}
}
impl error::Error for WebhookError {
fn description(&self) -> &str {
match *self {
WebhookError::BadHeader(_) => "error parsing timestamp",
WebhookError::BadSignature => "error comparing signatures",
WebhookError::BadTimestamp(_) => "error comparing timestamps - over tolerance",
WebhookError::BadParse(_) => "error parsing event object",
}
}
fn cause(&self) -> Option<&error::Error> {
match *self {
WebhookError::BadHeader(ref err) => Some(err),
WebhookError::BadSignature => None,
WebhookError::BadTimestamp(_) => None,
WebhookError::BadParse(ref err) => Some(err),
}
}
} | #[serde(skip_deserializing)]
pub http_status: u16, | random_line_split |
error.rs | extern crate hyper;
extern crate serde_json as json;
extern crate serde_qs as qs;
use params::to_snakecase;
use std::error;
use std::fmt;
use std::io;
use std::num::ParseIntError;
/// An error encountered when communicating with the Stripe API.
#[derive(Debug)]
pub enum Error {
/// An error reported by Stripe.
Stripe(RequestError),
/// A networking error communicating with the Stripe server.
Http(hyper::Error),
/// An error reading the response body.
Io(io::Error),
/// An error converting between wire format and Rust types.
Conversion(Box<error::Error + Send>),
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str(error::Error::description(self))?;
match *self {
Error::Stripe(ref err) => write!(f, ": {}", err),
Error::Http(ref err) => write!(f, ": {}", err),
Error::Io(ref err) => write!(f, ": {}", err),
Error::Conversion(ref err) => write!(f, ": {}", err),
}
}
}
impl error::Error for Error {
fn description(&self) -> &str {
match *self {
Error::Stripe(_) => "error reported by stripe",
Error::Http(_) => "error communicating with stripe",
Error::Io(_) => "error reading response from stripe",
Error::Conversion(_) => "error converting between wire format and Rust types",
}
}
fn cause(&self) -> Option<&error::Error> {
match *self {
Error::Stripe(ref err) => Some(err),
Error::Http(ref err) => Some(err),
Error::Io(ref err) => Some(err),
Error::Conversion(ref err) => Some(&**err),
}
}
}
impl From<RequestError> for Error {
fn from(err: RequestError) -> Error {
Error::Stripe(err)
}
}
impl From<hyper::Error> for Error {
fn from(err: hyper::Error) -> Error {
Error::Http(err)
}
}
impl From<io::Error> for Error {
fn from(err: io::Error) -> Error {
Error::Io(err)
}
}
impl From<qs::Error> for Error {
fn from(err: qs::Error) -> Error {
Error::Conversion(Box::new(err))
}
}
impl From<json::Error> for Error {
fn from(err: json::Error) -> Error {
Error::Conversion(Box::new(err))
}
}
/// The list of possible values for a RequestError's type.
#[derive(Debug, PartialEq, Deserialize)]
pub enum ErrorType {
#[serde(skip_deserializing)]
Unknown,
#[serde(rename = "api_error")]
Api,
#[serde(rename = "api_connection_error")]
Connection,
#[serde(rename = "authentication_error")]
Authentication,
#[serde(rename = "card_error")]
Card,
#[serde(rename = "invalid_request_error")]
InvalidRequest,
#[serde(rename = "rate_limit_error")]
RateLimit,
#[serde(rename = "validation_error")]
Validation,
}
impl Default for ErrorType {
fn default() -> Self {
ErrorType::Unknown
}
}
impl fmt::Display for ErrorType {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", to_snakecase(&format!("{:?}Error", self)))
}
}
/// The list of possible values for a RequestError's code.
#[derive(Debug, Deserialize, Serialize, Eq, PartialEq, Hash)]
#[serde(rename_all = "snake_case")]
pub enum ErrorCode {
AccountAlreadyExists,
AccountCountryInvalidAddress,
AccountInvalid,
AccountNumberInvalid,
AlipayUpgradeRequired,
AmountTooLarge,
AmountTooSmall,
ApiKeyExpired,
BalanceInsufficient,
BankAccountExists,
BankAccountUnusable,
BankAccountUnverified,
BitcoinUpgradeRequired,
CardDeclined,
ChargeAlreadyCaptured,
ChargeAlreadyRefunded,
ChargeDisputed,
ChargeExpiredForCapture,
CountryUnsupported,
CouponExpired,
CustomerMaxSubscriptions,
EmailInvalid,
ExpiredCard,
IncorrectAddress,
IncorrectCvc,
IncorrectNumber,
IncorrectZip,
InstantPayoutsUnsupported,
InvalidCardType,
InvalidChargeAmount,
InvalidCvc,
InvalidExpiryMonth,
InvalidExpiryYear,
InvalidNumber,
InvalidSourceUsage,
InvoiceNoCustomerLineItems,
InvoiceNoSubscriptionLineItems,
InvoiceNotEditable,
InvoiceUpcomingNone,
LivemodeMismatch,
Missing,
OrderCreationFailed,
OrderRequiredSettings,
OrderStatusInvalid,
OrderUpstreamTimeout,
OutOfInventory,
ParameterInvalidEmpty,
ParameterInvalidInteger,
ParameterInvalidStringBlank,
ParameterInvalidStringEmpty,
ParameterMissing,
ParameterUnknown,
PaymentMethodUnactivated,
PayoutsNotAllowed,
PlatformApiKeyExpired,
PostalCodeInvalid,
ProcessingError,
ProductInactive,
RateLimit,
ResourceAlreadyExists,
ResourceMissing,
RoutingNumberInvalid,
SecretKeyRequired,
SepaUnsupportedAccount,
ShippingCalculationFailed,
SkuInactive,
StateUnsupported,
TaxIdInvalid,
TaxesCalculationFailed,
TestmodeChargesOnly,
TlsVersionUnsupported,
TokenAlreadyUsed,
TokenInUse,
TransfersNotAllowed,
UpstreamOrderCreationFailed,
UrlInvalid,
#[doc(hidden)] __NonExhaustive,
}
impl fmt::Display for ErrorCode {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", to_snakecase(&format!("{:?}", self)))
}
}
/// An error reported by stripe in a request's response.
///
/// For more details see https://stripe.com/docs/api#errors.
#[derive(Debug, Default, Deserialize)]
pub struct RequestError {
/// The HTTP status in the response.
#[serde(skip_deserializing)]
pub http_status: u16,
/// The type of error returned.
#[serde(rename = "type")]
pub error_type: ErrorType,
/// A human-readable message providing more details about the error.
/// For card errors, these messages can be shown to end users.
#[serde(default)]
pub message: Option<String>,
/// For card errors, a value describing the kind of card error that occured.
pub code: Option<ErrorCode>,
/// For card errors resulting from a bank decline, a string indicating the
/// bank's reason for the decline if they provide one.
pub decline_code: Option<String>,
/// The ID of the failed charge, if applicable.
pub charge: Option<String>,
}
impl fmt::Display for RequestError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}({})", self.error_type, self.http_status)?;
if let Some(ref message) = self.message {
write!(f, ": {}", message)?;
}
Ok(())
}
}
impl error::Error for RequestError {
fn description(&self) -> &str {
self.message.as_ref().map(|s| s.as_str()).unwrap_or(
"request error",
)
}
}
#[doc(hidden)]
#[derive(Deserialize)]
pub struct ErrorObject {
pub error: RequestError,
}
/// An error encountered when communicating with the Stripe API webhooks.
#[derive(Debug)]
pub enum WebhookError {
BadHeader(ParseIntError),
BadSignature,
BadTimestamp(i64),
BadParse(json::Error),
}
impl fmt::Display for WebhookError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str(error::Error::description(self))?;
match *self {
WebhookError::BadHeader(ref err) => write!(f, ": {}", err),
WebhookError::BadSignature => write!(f, "Signatures do not match"),
WebhookError::BadTimestamp(ref err) => write!(f, ": {}", err),
WebhookError::BadParse(ref err) => write!(f, ": {}", err),
}
}
}
impl error::Error for WebhookError {
fn description(&self) -> &str {
match *self {
WebhookError::BadHeader(_) => "error parsing timestamp",
WebhookError::BadSignature => "error comparing signatures",
WebhookError::BadTimestamp(_) => "error comparing timestamps - over tolerance",
WebhookError::BadParse(_) => "error parsing event object",
}
}
fn | (&self) -> Option<&error::Error> {
match *self {
WebhookError::BadHeader(ref err) => Some(err),
WebhookError::BadSignature => None,
WebhookError::BadTimestamp(_) => None,
WebhookError::BadParse(ref err) => Some(err),
}
}
}
| cause | identifier_name |
file.rs | use bytes;
use std;
use std::io::Read;
use std::os::unix::fs::PermissionsExt;
use std::path::Path;
pub fn list_dir(path: &Path) -> Vec<String> |
pub fn contents(path: &Path) -> bytes::Bytes {
let mut contents = Vec::new();
std::fs::File::open(path)
.and_then(|mut f| f.read_to_end(&mut contents))
.expect("Error reading file");
bytes::Bytes::from(contents)
}
pub fn is_executable(path: &Path) -> bool {
std::fs::metadata(path)
.expect("Getting file metadata")
.permissions()
.mode()
& 0o100
== 0o100
}
| {
let mut v: Vec<_> = std::fs::read_dir(path)
.unwrap_or_else(|err| panic!("Listing dir {:?}: {:?}", path, err))
.map(|entry| {
entry
.expect("Error reading entry")
.file_name()
.to_string_lossy()
.to_string()
})
.collect();
v.sort();
v
} | identifier_body |
file.rs | use bytes;
use std;
use std::io::Read;
use std::os::unix::fs::PermissionsExt;
use std::path::Path;
pub fn list_dir(path: &Path) -> Vec<String> {
let mut v: Vec<_> = std::fs::read_dir(path)
.unwrap_or_else(|err| panic!("Listing dir {:?}: {:?}", path, err))
.map(|entry| {
entry
.expect("Error reading entry")
.file_name()
.to_string_lossy()
.to_string()
})
.collect();
v.sort();
v
}
pub fn | (path: &Path) -> bytes::Bytes {
let mut contents = Vec::new();
std::fs::File::open(path)
.and_then(|mut f| f.read_to_end(&mut contents))
.expect("Error reading file");
bytes::Bytes::from(contents)
}
pub fn is_executable(path: &Path) -> bool {
std::fs::metadata(path)
.expect("Getting file metadata")
.permissions()
.mode()
& 0o100
== 0o100
}
| contents | identifier_name |
file.rs | use bytes;
use std;
use std::io::Read;
use std::os::unix::fs::PermissionsExt;
use std::path::Path;
| .map(|entry| {
entry
.expect("Error reading entry")
.file_name()
.to_string_lossy()
.to_string()
})
.collect();
v.sort();
v
}
pub fn contents(path: &Path) -> bytes::Bytes {
let mut contents = Vec::new();
std::fs::File::open(path)
.and_then(|mut f| f.read_to_end(&mut contents))
.expect("Error reading file");
bytes::Bytes::from(contents)
}
pub fn is_executable(path: &Path) -> bool {
std::fs::metadata(path)
.expect("Getting file metadata")
.permissions()
.mode()
& 0o100
== 0o100
} | pub fn list_dir(path: &Path) -> Vec<String> {
let mut v: Vec<_> = std::fs::read_dir(path)
.unwrap_or_else(|err| panic!("Listing dir {:?}: {:?}", path, err)) | random_line_split |
index.tsx | import * as React from "react";
import * as ReactDOM from "react-dom";
import { hashHistory } from "react-router";
import configureStore from "./store/configureStore";
import { syncHistoryWithStore } from "react-router-redux";
import { createClient } from "../common/worker";
import { ElectronClient } from "../common/electron/worker";
import "../../node_modules/bootstrap/dist/css/bootstrap.css";
import "../assets/css/app.css";
import { AppContainer } from "react-hot-loader";
import Root from "./root";
import { buildRoutes } from "./routes";
createClient(() => new ElectronClient());
const store = configureStore({});
const history = syncHistoryWithStore(hashHistory, store);
const routes = buildRoutes();
ReactDOM.render(
<AppContainer>
<Root store={store} history={history} routes={routes} />
</AppContainer>,
document.getElementById("root"),
);
const m = module as any;
if (m.hot) {
m.hot.accept("./root", () => {
// tslint:disable-next-line:variable-name
const NewRoot = require("./root").default;
const newRoutes = buildRoutes();
ReactDOM.render(
<AppContainer>
<NewRoot store={store} history={history} routes={newRoutes} />
</AppContainer>, | document.getElementById("root"),
);
});
} | random_line_split | |
index.tsx | import * as React from "react";
import * as ReactDOM from "react-dom";
import { hashHistory } from "react-router";
import configureStore from "./store/configureStore";
import { syncHistoryWithStore } from "react-router-redux";
import { createClient } from "../common/worker";
import { ElectronClient } from "../common/electron/worker";
import "../../node_modules/bootstrap/dist/css/bootstrap.css";
import "../assets/css/app.css";
import { AppContainer } from "react-hot-loader";
import Root from "./root";
import { buildRoutes } from "./routes";
createClient(() => new ElectronClient());
const store = configureStore({});
const history = syncHistoryWithStore(hashHistory, store);
const routes = buildRoutes();
ReactDOM.render(
<AppContainer>
<Root store={store} history={history} routes={routes} />
</AppContainer>,
document.getElementById("root"),
);
const m = module as any;
if (m.hot) | {
m.hot.accept("./root", () => {
// tslint:disable-next-line:variable-name
const NewRoot = require("./root").default;
const newRoutes = buildRoutes();
ReactDOM.render(
<AppContainer>
<NewRoot store={store} history={history} routes={newRoutes} />
</AppContainer>,
document.getElementById("root"),
);
});
} | conditional_block | |
project.rs | /*
* project.rs: Commands to save/load projects.
* Copyright (C) 2019 Oddcoder
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
use crate::core::*;
use crate::helper::*;
use flate2::write::{ZlibDecoder, ZlibEncoder};
use flate2::Compression;
use serde::Deserialize;
use std::fs::File;
use std::io::prelude::*;
use std::mem;
#[derive(Default)]
pub struct Save {}
impl Save {
pub fn new() -> Self {
Default::default()
}
}
impl Cmd for Save {
fn run(&mut self, core: &mut Core, args: &[String]) {
if args.len() != 1 {
expect(core, args.len() as u64, 1);
return;
}
let data = match serde_cbor::to_vec(&core) {
Ok(data) => data,
Err(e) => return error_msg(core, "Failed to serialize project", &e.to_string()),
};
let mut file = match File::create(&args[0]) {
Ok(file) => file,
Err(e) => return error_msg(core, "Failed to open file", &e.to_string()),
};
let mut compressor = ZlibEncoder::new(Vec::new(), Compression::default());
compressor.write_all(&data).unwrap();
let compressed_data = compressor.finish().unwrap();
if let Err(e) = file.write_all(&compressed_data) {
return error_msg(core, "Failed to save project", &e.to_string());
}
}
fn help(&self, core: &mut Core) {
help(core, &"save", &"", vec![("[file_path]", "Save project into given path.")]);
}
}
#[derive(Default)]
pub struct Load {}
impl Load {
pub fn new() -> Self {
Default::default()
}
}
impl Cmd for Load {
fn run(&mut self, core: &mut Core, args: &[String]) {
if args.len() != 1 {
expect(core, args.len() as u64, 1);
return;
}
let mut file = match File::open(&args[0]) {
Ok(file) => file,
Err(e) => return error_msg(core, "Failed to open file", &e.to_string()),
};
let mut compressed_data: Vec<u8> = Vec::new();
if let Err(e) = file.read_to_end(&mut compressed_data) {
return error_msg(core, "Failed to load project", &e.to_string());
}
let mut data = Vec::new();
let mut decompressor = ZlibDecoder::new(data);
if let Err(e) = decompressor.write_all(&compressed_data) {
return error_msg(core, "Failed to decompress project", &e.to_string());
}
data = match decompressor.finish() {
Ok(data) => data,
Err(e) => return error_msg(core, "Failed to decompress project", &e.to_string()),
};
let mut deserializer = serde_cbor::Deserializer::from_slice(&data);
let mut core2: Core = match Core::deserialize(&mut deserializer) {
Ok(core) => core,
Err(e) => return error_msg(core, "Failed to load project", &e.to_string()),
};
mem::swap(&mut core.stdout, &mut core2.stdout);
mem::swap(&mut core.stderr, &mut core2.stderr);
mem::swap(&mut core.env, &mut core2.env);
core2.set_commands(core.commands());
*core = core2;
}
fn help(&self, core: &mut Core) {
help(core, &"load", &"", vec![("[file_path]", "load project from given path.")]);
}
}
#[cfg(test)]
mod test_project {
use super::*;
use crate::writer::*;
use rair_io::*;
use std::fs;
#[test]
fn test_project_help() {
let mut core = Core::new_no_colors();
core.stderr = Writer::new_buf();
core.stdout = Writer::new_buf();
let load = Load::new();
let save = Save::new();
load.help(&mut core);
save.help(&mut core);
assert_eq!(
core.stdout.utf8_string().unwrap(),
"Command: [load]\n\n\
Usage:\n\
load [file_path]\tload project from given path.\n\
Command: [save]\n\n\
Usage:\n\
save [file_path]\tSave project into given path.\n"
);
assert_eq!(core.stderr.utf8_string().unwrap(), "");
}
#[test]
fn test_project() |
}
| {
let mut core = Core::new_no_colors();
core.stderr = Writer::new_buf();
core.stdout = Writer::new_buf();
let mut load = Load::new();
let mut save = Save::new();
core.io.open("malloc://0x500", IoMode::READ | IoMode::WRITE).unwrap();
core.io.open_at("malloc://0x1337", IoMode::READ | IoMode::WRITE, 0x31000).unwrap();
core.io.map(0x31000, 0xfff31000, 0x337).unwrap();
save.run(&mut core, &["rair_project".to_string()]);
core.io.close_all();
load.run(&mut core, &["rair_project".to_string()]);
core.run("files", &[]);
core.run("maps", &[]);
assert_eq!(
core.stdout.utf8_string().unwrap(),
"Handle\tStart address\tsize\t\tPermissions\tURI\n\
0\t0x00000000\t0x00000500\tWRITE | READ\tmalloc://0x500\n\
1\t0x00031000\t0x00001337\tWRITE | READ\tmalloc://0x1337\n\
Virtual Address Physical Address Size\n\
0xfff31000 0x31000 0x337\n"
);
assert_eq!(core.stderr.utf8_string().unwrap(), "");
fs::remove_file("rair_project").unwrap();
} | identifier_body |
project.rs | /*
* project.rs: Commands to save/load projects.
* Copyright (C) 2019 Oddcoder
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
use crate::core::*;
use crate::helper::*;
use flate2::write::{ZlibDecoder, ZlibEncoder};
use flate2::Compression;
use serde::Deserialize;
use std::fs::File;
use std::io::prelude::*;
use std::mem;
#[derive(Default)]
pub struct Save {}
impl Save {
pub fn new() -> Self {
Default::default()
}
}
impl Cmd for Save {
fn run(&mut self, core: &mut Core, args: &[String]) {
if args.len() != 1 {
expect(core, args.len() as u64, 1);
return;
}
let data = match serde_cbor::to_vec(&core) {
Ok(data) => data,
Err(e) => return error_msg(core, "Failed to serialize project", &e.to_string()),
};
let mut file = match File::create(&args[0]) {
Ok(file) => file,
Err(e) => return error_msg(core, "Failed to open file", &e.to_string()),
};
let mut compressor = ZlibEncoder::new(Vec::new(), Compression::default());
compressor.write_all(&data).unwrap();
let compressed_data = compressor.finish().unwrap();
if let Err(e) = file.write_all(&compressed_data) {
return error_msg(core, "Failed to save project", &e.to_string());
}
}
fn help(&self, core: &mut Core) {
help(core, &"save", &"", vec![("[file_path]", "Save project into given path.")]);
}
}
#[derive(Default)]
pub struct Load {}
impl Load {
pub fn new() -> Self {
Default::default()
}
}
impl Cmd for Load {
fn | (&mut self, core: &mut Core, args: &[String]) {
if args.len() != 1 {
expect(core, args.len() as u64, 1);
return;
}
let mut file = match File::open(&args[0]) {
Ok(file) => file,
Err(e) => return error_msg(core, "Failed to open file", &e.to_string()),
};
let mut compressed_data: Vec<u8> = Vec::new();
if let Err(e) = file.read_to_end(&mut compressed_data) {
return error_msg(core, "Failed to load project", &e.to_string());
}
let mut data = Vec::new();
let mut decompressor = ZlibDecoder::new(data);
if let Err(e) = decompressor.write_all(&compressed_data) {
return error_msg(core, "Failed to decompress project", &e.to_string());
}
data = match decompressor.finish() {
Ok(data) => data,
Err(e) => return error_msg(core, "Failed to decompress project", &e.to_string()),
};
let mut deserializer = serde_cbor::Deserializer::from_slice(&data);
let mut core2: Core = match Core::deserialize(&mut deserializer) {
Ok(core) => core,
Err(e) => return error_msg(core, "Failed to load project", &e.to_string()),
};
mem::swap(&mut core.stdout, &mut core2.stdout);
mem::swap(&mut core.stderr, &mut core2.stderr);
mem::swap(&mut core.env, &mut core2.env);
core2.set_commands(core.commands());
*core = core2;
}
fn help(&self, core: &mut Core) {
help(core, &"load", &"", vec![("[file_path]", "load project from given path.")]);
}
}
#[cfg(test)]
mod test_project {
use super::*;
use crate::writer::*;
use rair_io::*;
use std::fs;
#[test]
fn test_project_help() {
let mut core = Core::new_no_colors();
core.stderr = Writer::new_buf();
core.stdout = Writer::new_buf();
let load = Load::new();
let save = Save::new();
load.help(&mut core);
save.help(&mut core);
assert_eq!(
core.stdout.utf8_string().unwrap(),
"Command: [load]\n\n\
Usage:\n\
load [file_path]\tload project from given path.\n\
Command: [save]\n\n\
Usage:\n\
save [file_path]\tSave project into given path.\n"
);
assert_eq!(core.stderr.utf8_string().unwrap(), "");
}
#[test]
fn test_project() {
let mut core = Core::new_no_colors();
core.stderr = Writer::new_buf();
core.stdout = Writer::new_buf();
let mut load = Load::new();
let mut save = Save::new();
core.io.open("malloc://0x500", IoMode::READ | IoMode::WRITE).unwrap();
core.io.open_at("malloc://0x1337", IoMode::READ | IoMode::WRITE, 0x31000).unwrap();
core.io.map(0x31000, 0xfff31000, 0x337).unwrap();
save.run(&mut core, &["rair_project".to_string()]);
core.io.close_all();
load.run(&mut core, &["rair_project".to_string()]);
core.run("files", &[]);
core.run("maps", &[]);
assert_eq!(
core.stdout.utf8_string().unwrap(),
"Handle\tStart address\tsize\t\tPermissions\tURI\n\
0\t0x00000000\t0x00000500\tWRITE | READ\tmalloc://0x500\n\
1\t0x00031000\t0x00001337\tWRITE | READ\tmalloc://0x1337\n\
Virtual Address Physical Address Size\n\
0xfff31000 0x31000 0x337\n"
);
assert_eq!(core.stderr.utf8_string().unwrap(), "");
fs::remove_file("rair_project").unwrap();
}
}
| run | identifier_name |
project.rs | /*
* project.rs: Commands to save/load projects.
* Copyright (C) 2019 Oddcoder
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
use crate::core::*;
use crate::helper::*;
use flate2::write::{ZlibDecoder, ZlibEncoder};
use flate2::Compression;
use serde::Deserialize;
use std::fs::File;
use std::io::prelude::*;
use std::mem;
#[derive(Default)]
pub struct Save {}
impl Save {
pub fn new() -> Self {
Default::default()
}
}
impl Cmd for Save {
fn run(&mut self, core: &mut Core, args: &[String]) {
if args.len() != 1 {
expect(core, args.len() as u64, 1);
return;
}
let data = match serde_cbor::to_vec(&core) {
Ok(data) => data,
Err(e) => return error_msg(core, "Failed to serialize project", &e.to_string()),
};
let mut file = match File::create(&args[0]) {
Ok(file) => file,
Err(e) => return error_msg(core, "Failed to open file", &e.to_string()),
};
let mut compressor = ZlibEncoder::new(Vec::new(), Compression::default());
compressor.write_all(&data).unwrap();
let compressed_data = compressor.finish().unwrap();
if let Err(e) = file.write_all(&compressed_data) {
return error_msg(core, "Failed to save project", &e.to_string());
}
}
fn help(&self, core: &mut Core) {
help(core, &"save", &"", vec![("[file_path]", "Save project into given path.")]);
}
}
#[derive(Default)]
pub struct Load {}
impl Load {
pub fn new() -> Self {
Default::default()
}
}
impl Cmd for Load {
fn run(&mut self, core: &mut Core, args: &[String]) {
if args.len() != 1 {
expect(core, args.len() as u64, 1);
return;
}
let mut file = match File::open(&args[0]) {
Ok(file) => file,
Err(e) => return error_msg(core, "Failed to open file", &e.to_string()),
};
let mut compressed_data: Vec<u8> = Vec::new();
if let Err(e) = file.read_to_end(&mut compressed_data) {
return error_msg(core, "Failed to load project", &e.to_string());
}
let mut data = Vec::new();
let mut decompressor = ZlibDecoder::new(data);
if let Err(e) = decompressor.write_all(&compressed_data) {
return error_msg(core, "Failed to decompress project", &e.to_string());
}
data = match decompressor.finish() {
Ok(data) => data,
Err(e) => return error_msg(core, "Failed to decompress project", &e.to_string()),
};
let mut deserializer = serde_cbor::Deserializer::from_slice(&data);
let mut core2: Core = match Core::deserialize(&mut deserializer) {
Ok(core) => core,
Err(e) => return error_msg(core, "Failed to load project", &e.to_string()),
};
mem::swap(&mut core.stdout, &mut core2.stdout);
mem::swap(&mut core.stderr, &mut core2.stderr);
mem::swap(&mut core.env, &mut core2.env);
core2.set_commands(core.commands());
*core = core2;
}
fn help(&self, core: &mut Core) {
help(core, &"load", &"", vec![("[file_path]", "load project from given path.")]);
}
}
#[cfg(test)]
mod test_project {
use super::*;
use crate::writer::*;
use rair_io::*;
use std::fs;
#[test]
fn test_project_help() {
let mut core = Core::new_no_colors();
core.stderr = Writer::new_buf();
core.stdout = Writer::new_buf();
let load = Load::new();
let save = Save::new();
load.help(&mut core);
save.help(&mut core);
assert_eq!(
core.stdout.utf8_string().unwrap(),
"Command: [load]\n\n\
Usage:\n\
load [file_path]\tload project from given path.\n\
Command: [save]\n\n\
Usage:\n\
save [file_path]\tSave project into given path.\n"
);
assert_eq!(core.stderr.utf8_string().unwrap(), "");
}
#[test]
fn test_project() {
let mut core = Core::new_no_colors();
core.stderr = Writer::new_buf(); | core.stdout = Writer::new_buf();
let mut load = Load::new();
let mut save = Save::new();
core.io.open("malloc://0x500", IoMode::READ | IoMode::WRITE).unwrap();
core.io.open_at("malloc://0x1337", IoMode::READ | IoMode::WRITE, 0x31000).unwrap();
core.io.map(0x31000, 0xfff31000, 0x337).unwrap();
save.run(&mut core, &["rair_project".to_string()]);
core.io.close_all();
load.run(&mut core, &["rair_project".to_string()]);
core.run("files", &[]);
core.run("maps", &[]);
assert_eq!(
core.stdout.utf8_string().unwrap(),
"Handle\tStart address\tsize\t\tPermissions\tURI\n\
0\t0x00000000\t0x00000500\tWRITE | READ\tmalloc://0x500\n\
1\t0x00031000\t0x00001337\tWRITE | READ\tmalloc://0x1337\n\
Virtual Address Physical Address Size\n\
0xfff31000 0x31000 0x337\n"
);
assert_eq!(core.stderr.utf8_string().unwrap(), "");
fs::remove_file("rair_project").unwrap();
}
} | random_line_split | |
test.ts | /*
* @license Apache-2.0
* | * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* tslint:disable:no-invalid-this */
import isSkewSymmetricMatrix = require( './index' );
// TESTS //
// The function returns a boolean...
{
const matrix = {
'data': [ 2, 1, 1, 2 ],
'ndims': 2,
'shape': [ 2, 2 ],
'strides': [ 2, 1 ],
'offset': 0,
'order': 'row-major',
'dtype': 'generic',
'length': 4,
'flags': {},
'get': function get( i: number, j: number ): number {
const idx = ( this.strides[ 0 ] * i ) + ( this.strides[ 1 ] * j );
return this.data[ idx ];
},
'set': function set( i: number, j: number, v: number ): number {
const idx = ( this.strides[ 0 ] * i ) + ( this.strides[ 1 ] * j );
this.data[ idx ] = v;
return v;
}
};
isSkewSymmetricMatrix( matrix ); // $ExpectType boolean
isSkewSymmetricMatrix( [] ); // $ExpectType boolean
isSkewSymmetricMatrix( false ); // $ExpectType boolean
}
// The compiler throws an error if the function is provided an unsupported number of arguments...
{
isSkewSymmetricMatrix(); // $ExpectError
} | * Copyright (c) 2021 The Stdlib Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); | random_line_split |
persistable.rs | use std::marker::PhantomData;
use expression::Expression;
use query_builder::{QueryBuilder, BuildQueryResult};
use query_source::{Table, Column};
use types::NativeSqlType;
/// Represents that a structure can be used to to insert a new row into the database.
/// Implementations can be automatically generated by
/// [`#[insertable_into]`](https://github.com/sgrif/diesel/tree/master/diesel_codegen#insertable_intotable_name).
/// This is automatically implemented for `&[T]`, `Vec<T>` and `&Vec<T>` for inserting more than
/// one record.
pub trait Insertable<T: Table> {
type Columns: InsertableColumns<T>;
type Values: Expression<SqlType=<Self::Columns as InsertableColumns<T>>::SqlType>;
fn columns() -> Self::Columns;
fn values(self) -> Self::Values;
}
pub trait InsertableColumns<T: Table> {
type SqlType: NativeSqlType;
fn names(&self) -> String;
}
impl<'a, T, U> Insertable<T> for &'a [U] where
T: Table,
&'a U: Insertable<T>,
{
type Columns = <&'a U as Insertable<T>>::Columns;
type Values = InsertValues<'a, T, U>;
fn columns() -> Self::Columns {
<&'a U>::columns()
}
fn values(self) -> Self::Values {
InsertValues {
values: self,
_marker: PhantomData,
}
}
}
impl<'a, T, U> Insertable<T> for &'a Vec<U> where
T: Table,
&'a U: Insertable<T>,
{
type Columns = <&'a U as Insertable<T>>::Columns;
type Values = InsertValues<'a, T, U>;
fn | () -> Self::Columns {
<&'a U>::columns()
}
fn values(self) -> Self::Values {
InsertValues {
values: &*self,
_marker: PhantomData,
}
}
}
pub struct InsertValues<'a, T, U: 'a> {
values: &'a [U],
_marker: PhantomData<T>,
}
impl<'a, T, U> Expression for InsertValues<'a, T, U> where
T: Table,
&'a U: Insertable<T>,
{
type SqlType = <<&'a U as Insertable<T>>::Columns as InsertableColumns<T>>::SqlType;
fn to_sql(&self, out: &mut QueryBuilder) -> BuildQueryResult {
self.to_insert_sql(out)
}
fn to_insert_sql(&self, out: &mut QueryBuilder) -> BuildQueryResult {
for (i, record) in self.values.into_iter().enumerate() {
if i != 0 {
out.push_sql(", ");
}
try!(record.values().to_insert_sql(out));
}
Ok(())
}
}
impl<C: Column<Table=T>, T: Table> InsertableColumns<T> for C {
type SqlType = <Self as Expression>::SqlType;
fn names(&self) -> String {
Self::name().to_string()
}
}
| columns | identifier_name |
persistable.rs | use std::marker::PhantomData;
use expression::Expression;
use query_builder::{QueryBuilder, BuildQueryResult};
use query_source::{Table, Column};
use types::NativeSqlType;
/// Represents that a structure can be used to to insert a new row into the database.
/// Implementations can be automatically generated by
/// [`#[insertable_into]`](https://github.com/sgrif/diesel/tree/master/diesel_codegen#insertable_intotable_name).
/// This is automatically implemented for `&[T]`, `Vec<T>` and `&Vec<T>` for inserting more than
/// one record.
pub trait Insertable<T: Table> {
type Columns: InsertableColumns<T>;
type Values: Expression<SqlType=<Self::Columns as InsertableColumns<T>>::SqlType>;
fn columns() -> Self::Columns;
fn values(self) -> Self::Values;
}
pub trait InsertableColumns<T: Table> {
type SqlType: NativeSqlType;
fn names(&self) -> String;
}
impl<'a, T, U> Insertable<T> for &'a [U] where
T: Table,
&'a U: Insertable<T>,
{
type Columns = <&'a U as Insertable<T>>::Columns;
type Values = InsertValues<'a, T, U>;
fn columns() -> Self::Columns {
<&'a U>::columns()
}
fn values(self) -> Self::Values {
InsertValues {
values: self,
_marker: PhantomData,
}
}
}
impl<'a, T, U> Insertable<T> for &'a Vec<U> where
T: Table,
&'a U: Insertable<T>,
{
type Columns = <&'a U as Insertable<T>>::Columns;
type Values = InsertValues<'a, T, U>;
fn columns() -> Self::Columns {
<&'a U>::columns()
}
fn values(self) -> Self::Values {
InsertValues {
values: &*self,
_marker: PhantomData,
}
}
}
pub struct InsertValues<'a, T, U: 'a> {
values: &'a [U],
_marker: PhantomData<T>,
}
impl<'a, T, U> Expression for InsertValues<'a, T, U> where
T: Table,
&'a U: Insertable<T>,
{
type SqlType = <<&'a U as Insertable<T>>::Columns as InsertableColumns<T>>::SqlType;
fn to_sql(&self, out: &mut QueryBuilder) -> BuildQueryResult {
self.to_insert_sql(out)
}
fn to_insert_sql(&self, out: &mut QueryBuilder) -> BuildQueryResult {
for (i, record) in self.values.into_iter().enumerate() {
if i != 0 {
out.push_sql(", ");
}
try!(record.values().to_insert_sql(out));
}
Ok(())
}
}
impl<C: Column<Table=T>, T: Table> InsertableColumns<T> for C {
type SqlType = <Self as Expression>::SqlType;
fn names(&self) -> String |
}
| {
Self::name().to_string()
} | identifier_body |
persistable.rs | use std::marker::PhantomData;
use expression::Expression;
use query_builder::{QueryBuilder, BuildQueryResult};
use query_source::{Table, Column};
use types::NativeSqlType;
/// Represents that a structure can be used to to insert a new row into the database.
/// Implementations can be automatically generated by
/// [`#[insertable_into]`](https://github.com/sgrif/diesel/tree/master/diesel_codegen#insertable_intotable_name).
/// This is automatically implemented for `&[T]`, `Vec<T>` and `&Vec<T>` for inserting more than
/// one record.
pub trait Insertable<T: Table> {
type Columns: InsertableColumns<T>;
type Values: Expression<SqlType=<Self::Columns as InsertableColumns<T>>::SqlType>;
fn columns() -> Self::Columns;
fn values(self) -> Self::Values;
}
pub trait InsertableColumns<T: Table> {
type SqlType: NativeSqlType;
fn names(&self) -> String;
}
impl<'a, T, U> Insertable<T> for &'a [U] where
T: Table,
&'a U: Insertable<T>,
{
type Columns = <&'a U as Insertable<T>>::Columns;
type Values = InsertValues<'a, T, U>;
fn columns() -> Self::Columns {
<&'a U>::columns()
}
fn values(self) -> Self::Values {
InsertValues {
values: self,
_marker: PhantomData,
}
}
}
impl<'a, T, U> Insertable<T> for &'a Vec<U> where
T: Table,
&'a U: Insertable<T>,
{
type Columns = <&'a U as Insertable<T>>::Columns;
type Values = InsertValues<'a, T, U>;
fn columns() -> Self::Columns {
<&'a U>::columns()
}
fn values(self) -> Self::Values {
InsertValues {
values: &*self,
_marker: PhantomData,
}
}
}
pub struct InsertValues<'a, T, U: 'a> {
values: &'a [U], | T: Table,
&'a U: Insertable<T>,
{
type SqlType = <<&'a U as Insertable<T>>::Columns as InsertableColumns<T>>::SqlType;
fn to_sql(&self, out: &mut QueryBuilder) -> BuildQueryResult {
self.to_insert_sql(out)
}
fn to_insert_sql(&self, out: &mut QueryBuilder) -> BuildQueryResult {
for (i, record) in self.values.into_iter().enumerate() {
if i != 0 {
out.push_sql(", ");
}
try!(record.values().to_insert_sql(out));
}
Ok(())
}
}
impl<C: Column<Table=T>, T: Table> InsertableColumns<T> for C {
type SqlType = <Self as Expression>::SqlType;
fn names(&self) -> String {
Self::name().to_string()
}
} | _marker: PhantomData<T>,
}
impl<'a, T, U> Expression for InsertValues<'a, T, U> where | random_line_split |
persistable.rs | use std::marker::PhantomData;
use expression::Expression;
use query_builder::{QueryBuilder, BuildQueryResult};
use query_source::{Table, Column};
use types::NativeSqlType;
/// Represents that a structure can be used to to insert a new row into the database.
/// Implementations can be automatically generated by
/// [`#[insertable_into]`](https://github.com/sgrif/diesel/tree/master/diesel_codegen#insertable_intotable_name).
/// This is automatically implemented for `&[T]`, `Vec<T>` and `&Vec<T>` for inserting more than
/// one record.
pub trait Insertable<T: Table> {
type Columns: InsertableColumns<T>;
type Values: Expression<SqlType=<Self::Columns as InsertableColumns<T>>::SqlType>;
fn columns() -> Self::Columns;
fn values(self) -> Self::Values;
}
pub trait InsertableColumns<T: Table> {
type SqlType: NativeSqlType;
fn names(&self) -> String;
}
impl<'a, T, U> Insertable<T> for &'a [U] where
T: Table,
&'a U: Insertable<T>,
{
type Columns = <&'a U as Insertable<T>>::Columns;
type Values = InsertValues<'a, T, U>;
fn columns() -> Self::Columns {
<&'a U>::columns()
}
fn values(self) -> Self::Values {
InsertValues {
values: self,
_marker: PhantomData,
}
}
}
impl<'a, T, U> Insertable<T> for &'a Vec<U> where
T: Table,
&'a U: Insertable<T>,
{
type Columns = <&'a U as Insertable<T>>::Columns;
type Values = InsertValues<'a, T, U>;
fn columns() -> Self::Columns {
<&'a U>::columns()
}
fn values(self) -> Self::Values {
InsertValues {
values: &*self,
_marker: PhantomData,
}
}
}
pub struct InsertValues<'a, T, U: 'a> {
values: &'a [U],
_marker: PhantomData<T>,
}
impl<'a, T, U> Expression for InsertValues<'a, T, U> where
T: Table,
&'a U: Insertable<T>,
{
type SqlType = <<&'a U as Insertable<T>>::Columns as InsertableColumns<T>>::SqlType;
fn to_sql(&self, out: &mut QueryBuilder) -> BuildQueryResult {
self.to_insert_sql(out)
}
fn to_insert_sql(&self, out: &mut QueryBuilder) -> BuildQueryResult {
for (i, record) in self.values.into_iter().enumerate() {
if i != 0 |
try!(record.values().to_insert_sql(out));
}
Ok(())
}
}
impl<C: Column<Table=T>, T: Table> InsertableColumns<T> for C {
type SqlType = <Self as Expression>::SqlType;
fn names(&self) -> String {
Self::name().to_string()
}
}
| {
out.push_sql(", ");
} | conditional_block |
scripts.js | exports.BattleScripts = {
init: function() {
for (var i in this.data.Pokedex) {
var hp = this.modData('Pokedex', i).baseStats.hp;
var spd = this.modData('Pokedex', i).baseStats.atk; | var def = this.modData('Pokedex', i).baseStats.def;
var spa = this.modData('Pokedex', i).baseStats.spa;
var spd = this.modData('Pokedex', i).baseStats.spd;
var spe = this.modData('Pokedex', i).baseStats.spe;
this.modData('Pokedex', i).baseStats.hp = spe;
this.modData('Pokedex', i).baseStats.atk = spa;
this.modData('Pokedex', i).baseStats.def = spd;
this.modData('Pokedex', i).baseStats.spa = atk;
this.modData('Pokedex', i).baseStats.spd = def;
this.modData('Pokedex', i).baseStats.spe = hp;
}
}; | random_line_split | |
webpack.config.js | var path = require("path");
var webpack = require("webpack");
var HtmlWebpackPlugin = require('html-webpack-plugin');
// start webpack in production mode by inlining the node env: `NODE_ENV=production webpack -p` (linux) or set NODE_ENV=production&&webpack -p (windows)
var isProd = process.env.NODE_ENV == "production";
console.log(isProd ? "PRODUCTION BUILD" : "DEVELOPMENT BUILD");
var commonEntry = [ 'font-awesome-webpack', './app.js'];
const PORT = 9000;
/**
* Output
* Reference: http://webpack.github.io/docs/configuration.html#output
* Should be an empty object if it's generating a test build
* Karma will handle setting it up for you when it's a test build
*/
var output = {
// Absolute output directory
path: __dirname + '/dist',
// Output path from the view of the page
// Uses webpack-dev-server in development
publicPath: isProd ? '/' : 'http://localhost:' + PORT + '/',
// Filename for entry points. Only adds hash in build mode
filename: isProd ? '[name].[hash].js' : '[name].bundle.js',
// Filename for non-entry points. Only adds hash in build mode
chunkFilename: isProd ? '[name].[hash].js' : '[name].bundle.js'
};
var plugins = [
new HtmlWebpackPlugin({
template: './public/index.html',
inject: 'body'
})
];
if (isProd) |
module.exports = {
debug : !isProd,
devtool : isProd ? 'source-map' : 'eval-source-map', // see http://webpack.github.io/docs/configuration.html#devtool
entry: commonEntry,
output: output,
plugins: plugins,
devServer: {
port: PORT,
contentBase: './public',
stats: 'minimal',
proxy: {
'/api/*': {
target: 'http://127.0.0.1:8080',
secure: false
},
'/eventbus/*': {
target: 'http://127.0.0.1:8080',
secure: false,
ws: true
}
}
},
jshint: {
esversion: 6
},
module : {
preLoaders: [
{
test: /\.js$/, // include .js files
exclude: /node_modules/, // exclude any and all files in the node_modules folder
loader: "jshint-loader"
}
],
loaders : [
{ test: /\.js?$/, exclude: /node_modules/, loader: 'babel-loader' },
{ test: /\.(css|less)$/, loader: 'style-loader!css-loader!less-loader' },
{ test: /\.hbs$/, loader: 'handlebars-loader?helperDirs[]=' + __dirname + '/modules/templateHelpers' },
// loaders for webfonts
{ test: /\.woff(2)?(\?v=[0-9]\.[0-9]\.[0-9])?$/, loader: "url-loader?limit=10000&minetype=application/font-woff" },
{ test: /\.(ttf|eot|svg)(\?v=[0-9]\.[0-9]\.[0-9])?$/, loader: "file-loader" }
// { test:require.resolve('openlayers'), loader:"imports?define=>false" } // workaround for openlayers issue as in https://github.com/openlayers/ol3/issues/3162
]
},
resolve : {
root: [path.resolve('./modules')]
}
}; | {
plugins.push(
new webpack.NoErrorsPlugin(), // Only emit files when there are no errors
new webpack.optimize.UglifyJsPlugin(), // http://webpack.github.io/docs/list-of-plugins.html#uglifyjsplugin
new CopyWebpackPlugin([{
from: __dirname + '/public' // Copy assets from the public folder: https://github.com/kevlened/copy-webpack-plugin
}])
)
} | conditional_block |
webpack.config.js | var path = require("path");
var webpack = require("webpack");
var HtmlWebpackPlugin = require('html-webpack-plugin');
// start webpack in production mode by inlining the node env: `NODE_ENV=production webpack -p` (linux) or set NODE_ENV=production&&webpack -p (windows)
var isProd = process.env.NODE_ENV == "production";
console.log(isProd ? "PRODUCTION BUILD" : "DEVELOPMENT BUILD");
var commonEntry = [ 'font-awesome-webpack', './app.js'];
const PORT = 9000;
/**
* Output
* Reference: http://webpack.github.io/docs/configuration.html#output
* Should be an empty object if it's generating a test build
* Karma will handle setting it up for you when it's a test build
*/
var output = {
// Absolute output directory
path: __dirname + '/dist',
// Output path from the view of the page
// Uses webpack-dev-server in development
publicPath: isProd ? '/' : 'http://localhost:' + PORT + '/',
// Filename for entry points. Only adds hash in build mode
filename: isProd ? '[name].[hash].js' : '[name].bundle.js',
// Filename for non-entry points. Only adds hash in build mode
chunkFilename: isProd ? '[name].[hash].js' : '[name].bundle.js'
};
var plugins = [
new HtmlWebpackPlugin({
template: './public/index.html',
inject: 'body'
})
];
if (isProd) {
plugins.push(
new webpack.NoErrorsPlugin(), // Only emit files when there are no errors
new webpack.optimize.UglifyJsPlugin(), // http://webpack.github.io/docs/list-of-plugins.html#uglifyjsplugin
new CopyWebpackPlugin([{
from: __dirname + '/public' // Copy assets from the public folder: https://github.com/kevlened/copy-webpack-plugin
}])
)
}
module.exports = {
debug : !isProd,
devtool : isProd ? 'source-map' : 'eval-source-map', // see http://webpack.github.io/docs/configuration.html#devtool
entry: commonEntry,
output: output,
plugins: plugins,
devServer: {
port: PORT,
contentBase: './public',
stats: 'minimal', | proxy: {
'/api/*': {
target: 'http://127.0.0.1:8080',
secure: false
},
'/eventbus/*': {
target: 'http://127.0.0.1:8080',
secure: false,
ws: true
}
}
},
jshint: {
esversion: 6
},
module : {
preLoaders: [
{
test: /\.js$/, // include .js files
exclude: /node_modules/, // exclude any and all files in the node_modules folder
loader: "jshint-loader"
}
],
loaders : [
{ test: /\.js?$/, exclude: /node_modules/, loader: 'babel-loader' },
{ test: /\.(css|less)$/, loader: 'style-loader!css-loader!less-loader' },
{ test: /\.hbs$/, loader: 'handlebars-loader?helperDirs[]=' + __dirname + '/modules/templateHelpers' },
// loaders for webfonts
{ test: /\.woff(2)?(\?v=[0-9]\.[0-9]\.[0-9])?$/, loader: "url-loader?limit=10000&minetype=application/font-woff" },
{ test: /\.(ttf|eot|svg)(\?v=[0-9]\.[0-9]\.[0-9])?$/, loader: "file-loader" }
// { test:require.resolve('openlayers'), loader:"imports?define=>false" } // workaround for openlayers issue as in https://github.com/openlayers/ol3/issues/3162
]
},
resolve : {
root: [path.resolve('./modules')]
}
}; | random_line_split | |
__init__.py | # (c) 2012, Daniel Hokka Zakrisson <daniel@hozac.com>
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com> and others
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import glob
import imp
import inspect
import os
import os.path
import sys
from ansible import constants as C
from ansible.utils.display import Display
from ansible import errors
MODULE_CACHE = {}
PATH_CACHE = {}
PLUGIN_PATH_CACHE = {}
_basedirs = []
def push_basedir(basedir):
# avoid pushing the same absolute dir more than once
basedir = os.path.realpath(basedir)
if basedir not in _basedirs:
_basedirs.insert(0, basedir)
def get_all_plugin_loaders():
return [(name, obj) for (name, obj) in inspect.getmembers(sys.modules[__name__]) if isinstance(obj, PluginLoader)]
class PluginLoader:
'''
PluginLoader loads plugins from the configured plugin directories.
It searches for plugins by iterating through the combined list of
play basedirs, configured paths, and the python path.
The first match is used.
'''
def __init__(self, class_name, package, config, subdir, aliases={}, required_base_class=None):
self.class_name = class_name
self.base_class = required_base_class
self.package = package
self.config = config
self.subdir = subdir
self.aliases = aliases
if not class_name in MODULE_CACHE:
MODULE_CACHE[class_name] = {}
if not class_name in PATH_CACHE:
PATH_CACHE[class_name] = None
if not class_name in PLUGIN_PATH_CACHE:
PLUGIN_PATH_CACHE[class_name] = {}
self._module_cache = MODULE_CACHE[class_name]
self._paths = PATH_CACHE[class_name]
self._plugin_path_cache = PLUGIN_PATH_CACHE[class_name]
self._extra_dirs = []
self._searched_paths = set()
def __setstate__(self, data):
'''
Deserializer.
'''
class_name = data.get('class_name')
package = data.get('package')
config = data.get('config')
subdir = data.get('subdir')
aliases = data.get('aliases')
base_class = data.get('base_class')
PATH_CACHE[class_name] = data.get('PATH_CACHE')
PLUGIN_PATH_CACHE[class_name] = data.get('PLUGIN_PATH_CACHE')
self.__init__(class_name, package, config, subdir, aliases, base_class)
self._extra_dirs = data.get('_extra_dirs', [])
self._searched_paths = data.get('_searched_paths', set())
def __getstate__(self):
'''
Serializer.
'''
return dict(
class_name = self.class_name,
base_class = self.base_class,
package = self.package,
config = self.config,
subdir = self.subdir,
aliases = self.aliases,
_extra_dirs = self._extra_dirs,
_searched_paths = self._searched_paths,
PATH_CACHE = PATH_CACHE[self.class_name],
PLUGIN_PATH_CACHE = PLUGIN_PATH_CACHE[self.class_name],
)
def print_paths(self):
''' Returns a string suitable for printing of the search path '''
# Uses a list to get the order right
ret = []
for i in self._get_paths():
if i not in ret:
ret.append(i)
return os.pathsep.join(ret)
def _all_directories(self, dir):
results = []
results.append(dir)
for root, subdirs, files in os.walk(dir):
if '__init__.py' in files:
for x in subdirs:
results.append(os.path.join(root,x))
return results
def _get_package_paths(self):
''' Gets the path of a Python package '''
paths = []
if not self.package:
return []
if not hasattr(self, 'package_path'):
m = __import__(self.package)
parts = self.package.split('.')[1:]
self.package_path = os.path.join(os.path.dirname(m.__file__), *parts)
paths.extend(self._all_directories(self.package_path))
return paths
def _get_paths(self):
''' Return a list of paths to search for plugins in '''
if self._paths is not None:
return self._paths
ret = self._extra_dirs[:]
for basedir in _basedirs:
fullpath = os.path.realpath(os.path.join(basedir, self.subdir))
if os.path.isdir(fullpath):
files = glob.glob("%s/*" % fullpath)
# allow directories to be two levels deep
files2 = glob.glob("%s/*/*" % fullpath)
if files2 is not None:
files.extend(files2)
for file in files:
if os.path.isdir(file) and file not in ret:
ret.append(file)
if fullpath not in ret:
ret.append(fullpath)
# look in any configured plugin paths, allow one level deep for subcategories
if self.config is not None:
configured_paths = self.config.split(os.pathsep)
for path in configured_paths:
path = os.path.realpath(os.path.expanduser(path))
contents = glob.glob("%s/*" % path) + glob.glob("%s/*/*" % path)
for c in contents:
if os.path.isdir(c) and c not in ret:
ret.append(c)
if path not in ret:
ret.append(path)
# look for any plugins installed in the package subtree
ret.extend(self._get_package_paths())
# cache and return the result
self._paths = ret
return ret
def add_directory(self, directory, with_subdir=False):
''' Adds an additional directory to the search path '''
directory = os.path.realpath(directory)
if directory is not None:
if with_subdir:
directory = os.path.join(directory, self.subdir)
if directory not in self._extra_dirs:
# append the directory and invalidate the path cache
self._extra_dirs.append(directory)
self._paths = None
def find_plugin(self, name, suffixes=None):
''' Find a plugin named name '''
if not suffixes:
if self.class_name:
suffixes = ['.py']
else:
suffixes = ['.py', '']
potential_names = frozenset('%s%s' % (name, s) for s in suffixes)
for full_name in potential_names:
if full_name in self._plugin_path_cache:
return self._plugin_path_cache[full_name]
found = None
for path in [p for p in self._get_paths() if p not in self._searched_paths]:
if os.path.isdir(path):
try:
full_paths = (os.path.join(path, f) for f in os.listdir(path))
except OSError as e:
d = Display()
d.warning("Error accessing plugin paths: %s" % str(e))
for full_path in (f for f in full_paths if os.path.isfile(f)):
for suffix in suffixes:
if full_path.endswith(suffix):
full_name = os.path.basename(full_path)
break
else: # Yes, this is a for-else: http://bit.ly/1ElPkyg
continue
if full_name not in self._plugin_path_cache:
self._plugin_path_cache[full_name] = full_path
self._searched_paths.add(path)
for full_name in potential_names:
if full_name in self._plugin_path_cache:
return self._plugin_path_cache[full_name]
# if nothing is found, try finding alias/deprecated
if not name.startswith('_'):
for alias_name in ('_%s' % n for n in potential_names):
# We've already cached all the paths at this point
if alias_name in self._plugin_path_cache:
return self._plugin_path_cache[alias_name]
return None
def has_plugin(self, name):
''' Checks if a plugin named name exists '''
return self.find_plugin(name) is not None
__contains__ = has_plugin
def get(self, name, *args, **kwargs):
''' instantiates a plugin of the given name using arguments '''
if name in self.aliases:
name = self.aliases[name]
path = self.find_plugin(name)
if path is None:
return None
if path not in self._module_cache:
self._module_cache[path] = imp.load_source('.'.join([self.package, name]), path)
if kwargs.get('class_only', False):
obj = getattr(self._module_cache[path], self.class_name)
else:
obj = getattr(self._module_cache[path], self.class_name)(*args, **kwargs)
if self.base_class and self.base_class not in [base.__name__ for base in obj.__class__.__bases__]:
return None
return obj
def all(self, *args, **kwargs):
''' instantiates all plugins with the same arguments '''
for i in self._get_paths():
matches = glob.glob(os.path.join(i, "*.py"))
matches.sort()
for path in matches:
name, ext = os.path.splitext(os.path.basename(path))
if name.startswith("_"):
continue
if path not in self._module_cache:
self._module_cache[path] = imp.load_source('.'.join([self.package, name]), path)
if kwargs.get('class_only', False):
obj = getattr(self._module_cache[path], self.class_name)
else:
obj = getattr(self._module_cache[path], self.class_name)(*args, **kwargs)
if self.base_class and self.base_class not in [base.__name__ for base in obj.__class__.__bases__]:
continue
# set extra info on the module, in case we want it later
setattr(obj, '_original_path', path)
yield obj
action_loader = PluginLoader(
'ActionModule',
'ansible.plugins.action',
C.DEFAULT_ACTION_PLUGIN_PATH,
'action_plugins',
required_base_class='ActionBase',
)
cache_loader = PluginLoader(
'CacheModule',
'ansible.plugins.cache', | )
callback_loader = PluginLoader(
'CallbackModule',
'ansible.plugins.callback',
C.DEFAULT_CALLBACK_PLUGIN_PATH,
'callback_plugins',
)
connection_loader = PluginLoader(
'Connection',
'ansible.plugins.connections',
C.DEFAULT_CONNECTION_PLUGIN_PATH,
'connection_plugins',
aliases={'paramiko': 'paramiko_ssh'},
required_base_class='ConnectionBase',
)
shell_loader = PluginLoader(
'ShellModule',
'ansible.plugins.shell',
'shell_plugins',
'shell_plugins',
)
module_loader = PluginLoader(
'',
'ansible.modules',
C.DEFAULT_MODULE_PATH,
'library',
)
lookup_loader = PluginLoader(
'LookupModule',
'ansible.plugins.lookup',
C.DEFAULT_LOOKUP_PLUGIN_PATH,
'lookup_plugins',
required_base_class='LookupBase',
)
vars_loader = PluginLoader(
'VarsModule',
'ansible.plugins.vars',
C.DEFAULT_VARS_PLUGIN_PATH,
'vars_plugins',
)
filter_loader = PluginLoader(
'FilterModule',
'ansible.plugins.filter',
C.DEFAULT_FILTER_PLUGIN_PATH,
'filter_plugins',
)
fragment_loader = PluginLoader(
'ModuleDocFragment',
'ansible.utils.module_docs_fragments',
os.path.join(os.path.dirname(__file__), 'module_docs_fragments'),
'',
)
strategy_loader = PluginLoader(
'StrategyModule',
'ansible.plugins.strategies',
None,
'strategy_plugins',
required_base_class='StrategyBase',
) | C.DEFAULT_CACHE_PLUGIN_PATH,
'cache_plugins', | random_line_split |
__init__.py | # (c) 2012, Daniel Hokka Zakrisson <daniel@hozac.com>
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com> and others
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import glob
import imp
import inspect
import os
import os.path
import sys
from ansible import constants as C
from ansible.utils.display import Display
from ansible import errors
MODULE_CACHE = {}
PATH_CACHE = {}
PLUGIN_PATH_CACHE = {}
_basedirs = []
def push_basedir(basedir):
# avoid pushing the same absolute dir more than once
basedir = os.path.realpath(basedir)
if basedir not in _basedirs:
_basedirs.insert(0, basedir)
def get_all_plugin_loaders():
return [(name, obj) for (name, obj) in inspect.getmembers(sys.modules[__name__]) if isinstance(obj, PluginLoader)]
class PluginLoader:
'''
PluginLoader loads plugins from the configured plugin directories.
It searches for plugins by iterating through the combined list of
play basedirs, configured paths, and the python path.
The first match is used.
'''
def __init__(self, class_name, package, config, subdir, aliases={}, required_base_class=None):
self.class_name = class_name
self.base_class = required_base_class
self.package = package
self.config = config
self.subdir = subdir
self.aliases = aliases
if not class_name in MODULE_CACHE:
MODULE_CACHE[class_name] = {}
if not class_name in PATH_CACHE:
PATH_CACHE[class_name] = None
if not class_name in PLUGIN_PATH_CACHE:
PLUGIN_PATH_CACHE[class_name] = {}
self._module_cache = MODULE_CACHE[class_name]
self._paths = PATH_CACHE[class_name]
self._plugin_path_cache = PLUGIN_PATH_CACHE[class_name]
self._extra_dirs = []
self._searched_paths = set()
def __setstate__(self, data):
'''
Deserializer.
'''
class_name = data.get('class_name')
package = data.get('package')
config = data.get('config')
subdir = data.get('subdir')
aliases = data.get('aliases')
base_class = data.get('base_class')
PATH_CACHE[class_name] = data.get('PATH_CACHE')
PLUGIN_PATH_CACHE[class_name] = data.get('PLUGIN_PATH_CACHE')
self.__init__(class_name, package, config, subdir, aliases, base_class)
self._extra_dirs = data.get('_extra_dirs', [])
self._searched_paths = data.get('_searched_paths', set())
def __getstate__(self):
'''
Serializer.
'''
return dict(
class_name = self.class_name,
base_class = self.base_class,
package = self.package,
config = self.config,
subdir = self.subdir,
aliases = self.aliases,
_extra_dirs = self._extra_dirs,
_searched_paths = self._searched_paths,
PATH_CACHE = PATH_CACHE[self.class_name],
PLUGIN_PATH_CACHE = PLUGIN_PATH_CACHE[self.class_name],
)
def print_paths(self):
''' Returns a string suitable for printing of the search path '''
# Uses a list to get the order right
ret = []
for i in self._get_paths():
if i not in ret:
ret.append(i)
return os.pathsep.join(ret)
def _all_directories(self, dir):
results = []
results.append(dir)
for root, subdirs, files in os.walk(dir):
if '__init__.py' in files:
for x in subdirs:
results.append(os.path.join(root,x))
return results
def _get_package_paths(self):
''' Gets the path of a Python package '''
paths = []
if not self.package:
return []
if not hasattr(self, 'package_path'):
m = __import__(self.package)
parts = self.package.split('.')[1:]
self.package_path = os.path.join(os.path.dirname(m.__file__), *parts)
paths.extend(self._all_directories(self.package_path))
return paths
def _get_paths(self):
''' Return a list of paths to search for plugins in '''
if self._paths is not None:
return self._paths
ret = self._extra_dirs[:]
for basedir in _basedirs:
fullpath = os.path.realpath(os.path.join(basedir, self.subdir))
if os.path.isdir(fullpath):
files = glob.glob("%s/*" % fullpath)
# allow directories to be two levels deep
files2 = glob.glob("%s/*/*" % fullpath)
if files2 is not None:
files.extend(files2)
for file in files:
if os.path.isdir(file) and file not in ret:
ret.append(file)
if fullpath not in ret:
ret.append(fullpath)
# look in any configured plugin paths, allow one level deep for subcategories
if self.config is not None:
configured_paths = self.config.split(os.pathsep)
for path in configured_paths:
path = os.path.realpath(os.path.expanduser(path))
contents = glob.glob("%s/*" % path) + glob.glob("%s/*/*" % path)
for c in contents:
if os.path.isdir(c) and c not in ret:
ret.append(c)
if path not in ret:
ret.append(path)
# look for any plugins installed in the package subtree
ret.extend(self._get_package_paths())
# cache and return the result
self._paths = ret
return ret
def add_directory(self, directory, with_subdir=False):
''' Adds an additional directory to the search path '''
directory = os.path.realpath(directory)
if directory is not None:
if with_subdir:
directory = os.path.join(directory, self.subdir)
if directory not in self._extra_dirs:
# append the directory and invalidate the path cache
self._extra_dirs.append(directory)
self._paths = None
def | (self, name, suffixes=None):
''' Find a plugin named name '''
if not suffixes:
if self.class_name:
suffixes = ['.py']
else:
suffixes = ['.py', '']
potential_names = frozenset('%s%s' % (name, s) for s in suffixes)
for full_name in potential_names:
if full_name in self._plugin_path_cache:
return self._plugin_path_cache[full_name]
found = None
for path in [p for p in self._get_paths() if p not in self._searched_paths]:
if os.path.isdir(path):
try:
full_paths = (os.path.join(path, f) for f in os.listdir(path))
except OSError as e:
d = Display()
d.warning("Error accessing plugin paths: %s" % str(e))
for full_path in (f for f in full_paths if os.path.isfile(f)):
for suffix in suffixes:
if full_path.endswith(suffix):
full_name = os.path.basename(full_path)
break
else: # Yes, this is a for-else: http://bit.ly/1ElPkyg
continue
if full_name not in self._plugin_path_cache:
self._plugin_path_cache[full_name] = full_path
self._searched_paths.add(path)
for full_name in potential_names:
if full_name in self._plugin_path_cache:
return self._plugin_path_cache[full_name]
# if nothing is found, try finding alias/deprecated
if not name.startswith('_'):
for alias_name in ('_%s' % n for n in potential_names):
# We've already cached all the paths at this point
if alias_name in self._plugin_path_cache:
return self._plugin_path_cache[alias_name]
return None
def has_plugin(self, name):
''' Checks if a plugin named name exists '''
return self.find_plugin(name) is not None
__contains__ = has_plugin
def get(self, name, *args, **kwargs):
''' instantiates a plugin of the given name using arguments '''
if name in self.aliases:
name = self.aliases[name]
path = self.find_plugin(name)
if path is None:
return None
if path not in self._module_cache:
self._module_cache[path] = imp.load_source('.'.join([self.package, name]), path)
if kwargs.get('class_only', False):
obj = getattr(self._module_cache[path], self.class_name)
else:
obj = getattr(self._module_cache[path], self.class_name)(*args, **kwargs)
if self.base_class and self.base_class not in [base.__name__ for base in obj.__class__.__bases__]:
return None
return obj
def all(self, *args, **kwargs):
''' instantiates all plugins with the same arguments '''
for i in self._get_paths():
matches = glob.glob(os.path.join(i, "*.py"))
matches.sort()
for path in matches:
name, ext = os.path.splitext(os.path.basename(path))
if name.startswith("_"):
continue
if path not in self._module_cache:
self._module_cache[path] = imp.load_source('.'.join([self.package, name]), path)
if kwargs.get('class_only', False):
obj = getattr(self._module_cache[path], self.class_name)
else:
obj = getattr(self._module_cache[path], self.class_name)(*args, **kwargs)
if self.base_class and self.base_class not in [base.__name__ for base in obj.__class__.__bases__]:
continue
# set extra info on the module, in case we want it later
setattr(obj, '_original_path', path)
yield obj
action_loader = PluginLoader(
'ActionModule',
'ansible.plugins.action',
C.DEFAULT_ACTION_PLUGIN_PATH,
'action_plugins',
required_base_class='ActionBase',
)
cache_loader = PluginLoader(
'CacheModule',
'ansible.plugins.cache',
C.DEFAULT_CACHE_PLUGIN_PATH,
'cache_plugins',
)
callback_loader = PluginLoader(
'CallbackModule',
'ansible.plugins.callback',
C.DEFAULT_CALLBACK_PLUGIN_PATH,
'callback_plugins',
)
connection_loader = PluginLoader(
'Connection',
'ansible.plugins.connections',
C.DEFAULT_CONNECTION_PLUGIN_PATH,
'connection_plugins',
aliases={'paramiko': 'paramiko_ssh'},
required_base_class='ConnectionBase',
)
shell_loader = PluginLoader(
'ShellModule',
'ansible.plugins.shell',
'shell_plugins',
'shell_plugins',
)
module_loader = PluginLoader(
'',
'ansible.modules',
C.DEFAULT_MODULE_PATH,
'library',
)
lookup_loader = PluginLoader(
'LookupModule',
'ansible.plugins.lookup',
C.DEFAULT_LOOKUP_PLUGIN_PATH,
'lookup_plugins',
required_base_class='LookupBase',
)
vars_loader = PluginLoader(
'VarsModule',
'ansible.plugins.vars',
C.DEFAULT_VARS_PLUGIN_PATH,
'vars_plugins',
)
filter_loader = PluginLoader(
'FilterModule',
'ansible.plugins.filter',
C.DEFAULT_FILTER_PLUGIN_PATH,
'filter_plugins',
)
fragment_loader = PluginLoader(
'ModuleDocFragment',
'ansible.utils.module_docs_fragments',
os.path.join(os.path.dirname(__file__), 'module_docs_fragments'),
'',
)
strategy_loader = PluginLoader(
'StrategyModule',
'ansible.plugins.strategies',
None,
'strategy_plugins',
required_base_class='StrategyBase',
)
| find_plugin | identifier_name |
__init__.py | # (c) 2012, Daniel Hokka Zakrisson <daniel@hozac.com>
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com> and others
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import glob
import imp
import inspect
import os
import os.path
import sys
from ansible import constants as C
from ansible.utils.display import Display
from ansible import errors
MODULE_CACHE = {}
PATH_CACHE = {}
PLUGIN_PATH_CACHE = {}
_basedirs = []
def push_basedir(basedir):
# avoid pushing the same absolute dir more than once
basedir = os.path.realpath(basedir)
if basedir not in _basedirs:
_basedirs.insert(0, basedir)
def get_all_plugin_loaders():
return [(name, obj) for (name, obj) in inspect.getmembers(sys.modules[__name__]) if isinstance(obj, PluginLoader)]
class PluginLoader:
|
action_loader = PluginLoader(
'ActionModule',
'ansible.plugins.action',
C.DEFAULT_ACTION_PLUGIN_PATH,
'action_plugins',
required_base_class='ActionBase',
)
cache_loader = PluginLoader(
'CacheModule',
'ansible.plugins.cache',
C.DEFAULT_CACHE_PLUGIN_PATH,
'cache_plugins',
)
callback_loader = PluginLoader(
'CallbackModule',
'ansible.plugins.callback',
C.DEFAULT_CALLBACK_PLUGIN_PATH,
'callback_plugins',
)
connection_loader = PluginLoader(
'Connection',
'ansible.plugins.connections',
C.DEFAULT_CONNECTION_PLUGIN_PATH,
'connection_plugins',
aliases={'paramiko': 'paramiko_ssh'},
required_base_class='ConnectionBase',
)
shell_loader = PluginLoader(
'ShellModule',
'ansible.plugins.shell',
'shell_plugins',
'shell_plugins',
)
module_loader = PluginLoader(
'',
'ansible.modules',
C.DEFAULT_MODULE_PATH,
'library',
)
lookup_loader = PluginLoader(
'LookupModule',
'ansible.plugins.lookup',
C.DEFAULT_LOOKUP_PLUGIN_PATH,
'lookup_plugins',
required_base_class='LookupBase',
)
vars_loader = PluginLoader(
'VarsModule',
'ansible.plugins.vars',
C.DEFAULT_VARS_PLUGIN_PATH,
'vars_plugins',
)
filter_loader = PluginLoader(
'FilterModule',
'ansible.plugins.filter',
C.DEFAULT_FILTER_PLUGIN_PATH,
'filter_plugins',
)
fragment_loader = PluginLoader(
'ModuleDocFragment',
'ansible.utils.module_docs_fragments',
os.path.join(os.path.dirname(__file__), 'module_docs_fragments'),
'',
)
strategy_loader = PluginLoader(
'StrategyModule',
'ansible.plugins.strategies',
None,
'strategy_plugins',
required_base_class='StrategyBase',
)
| '''
PluginLoader loads plugins from the configured plugin directories.
It searches for plugins by iterating through the combined list of
play basedirs, configured paths, and the python path.
The first match is used.
'''
def __init__(self, class_name, package, config, subdir, aliases={}, required_base_class=None):
self.class_name = class_name
self.base_class = required_base_class
self.package = package
self.config = config
self.subdir = subdir
self.aliases = aliases
if not class_name in MODULE_CACHE:
MODULE_CACHE[class_name] = {}
if not class_name in PATH_CACHE:
PATH_CACHE[class_name] = None
if not class_name in PLUGIN_PATH_CACHE:
PLUGIN_PATH_CACHE[class_name] = {}
self._module_cache = MODULE_CACHE[class_name]
self._paths = PATH_CACHE[class_name]
self._plugin_path_cache = PLUGIN_PATH_CACHE[class_name]
self._extra_dirs = []
self._searched_paths = set()
def __setstate__(self, data):
'''
Deserializer.
'''
class_name = data.get('class_name')
package = data.get('package')
config = data.get('config')
subdir = data.get('subdir')
aliases = data.get('aliases')
base_class = data.get('base_class')
PATH_CACHE[class_name] = data.get('PATH_CACHE')
PLUGIN_PATH_CACHE[class_name] = data.get('PLUGIN_PATH_CACHE')
self.__init__(class_name, package, config, subdir, aliases, base_class)
self._extra_dirs = data.get('_extra_dirs', [])
self._searched_paths = data.get('_searched_paths', set())
def __getstate__(self):
'''
Serializer.
'''
return dict(
class_name = self.class_name,
base_class = self.base_class,
package = self.package,
config = self.config,
subdir = self.subdir,
aliases = self.aliases,
_extra_dirs = self._extra_dirs,
_searched_paths = self._searched_paths,
PATH_CACHE = PATH_CACHE[self.class_name],
PLUGIN_PATH_CACHE = PLUGIN_PATH_CACHE[self.class_name],
)
def print_paths(self):
''' Returns a string suitable for printing of the search path '''
# Uses a list to get the order right
ret = []
for i in self._get_paths():
if i not in ret:
ret.append(i)
return os.pathsep.join(ret)
def _all_directories(self, dir):
results = []
results.append(dir)
for root, subdirs, files in os.walk(dir):
if '__init__.py' in files:
for x in subdirs:
results.append(os.path.join(root,x))
return results
def _get_package_paths(self):
''' Gets the path of a Python package '''
paths = []
if not self.package:
return []
if not hasattr(self, 'package_path'):
m = __import__(self.package)
parts = self.package.split('.')[1:]
self.package_path = os.path.join(os.path.dirname(m.__file__), *parts)
paths.extend(self._all_directories(self.package_path))
return paths
def _get_paths(self):
''' Return a list of paths to search for plugins in '''
if self._paths is not None:
return self._paths
ret = self._extra_dirs[:]
for basedir in _basedirs:
fullpath = os.path.realpath(os.path.join(basedir, self.subdir))
if os.path.isdir(fullpath):
files = glob.glob("%s/*" % fullpath)
# allow directories to be two levels deep
files2 = glob.glob("%s/*/*" % fullpath)
if files2 is not None:
files.extend(files2)
for file in files:
if os.path.isdir(file) and file not in ret:
ret.append(file)
if fullpath not in ret:
ret.append(fullpath)
# look in any configured plugin paths, allow one level deep for subcategories
if self.config is not None:
configured_paths = self.config.split(os.pathsep)
for path in configured_paths:
path = os.path.realpath(os.path.expanduser(path))
contents = glob.glob("%s/*" % path) + glob.glob("%s/*/*" % path)
for c in contents:
if os.path.isdir(c) and c not in ret:
ret.append(c)
if path not in ret:
ret.append(path)
# look for any plugins installed in the package subtree
ret.extend(self._get_package_paths())
# cache and return the result
self._paths = ret
return ret
def add_directory(self, directory, with_subdir=False):
''' Adds an additional directory to the search path '''
directory = os.path.realpath(directory)
if directory is not None:
if with_subdir:
directory = os.path.join(directory, self.subdir)
if directory not in self._extra_dirs:
# append the directory and invalidate the path cache
self._extra_dirs.append(directory)
self._paths = None
def find_plugin(self, name, suffixes=None):
''' Find a plugin named name '''
if not suffixes:
if self.class_name:
suffixes = ['.py']
else:
suffixes = ['.py', '']
potential_names = frozenset('%s%s' % (name, s) for s in suffixes)
for full_name in potential_names:
if full_name in self._plugin_path_cache:
return self._plugin_path_cache[full_name]
found = None
for path in [p for p in self._get_paths() if p not in self._searched_paths]:
if os.path.isdir(path):
try:
full_paths = (os.path.join(path, f) for f in os.listdir(path))
except OSError as e:
d = Display()
d.warning("Error accessing plugin paths: %s" % str(e))
for full_path in (f for f in full_paths if os.path.isfile(f)):
for suffix in suffixes:
if full_path.endswith(suffix):
full_name = os.path.basename(full_path)
break
else: # Yes, this is a for-else: http://bit.ly/1ElPkyg
continue
if full_name not in self._plugin_path_cache:
self._plugin_path_cache[full_name] = full_path
self._searched_paths.add(path)
for full_name in potential_names:
if full_name in self._plugin_path_cache:
return self._plugin_path_cache[full_name]
# if nothing is found, try finding alias/deprecated
if not name.startswith('_'):
for alias_name in ('_%s' % n for n in potential_names):
# We've already cached all the paths at this point
if alias_name in self._plugin_path_cache:
return self._plugin_path_cache[alias_name]
return None
def has_plugin(self, name):
''' Checks if a plugin named name exists '''
return self.find_plugin(name) is not None
__contains__ = has_plugin
def get(self, name, *args, **kwargs):
''' instantiates a plugin of the given name using arguments '''
if name in self.aliases:
name = self.aliases[name]
path = self.find_plugin(name)
if path is None:
return None
if path not in self._module_cache:
self._module_cache[path] = imp.load_source('.'.join([self.package, name]), path)
if kwargs.get('class_only', False):
obj = getattr(self._module_cache[path], self.class_name)
else:
obj = getattr(self._module_cache[path], self.class_name)(*args, **kwargs)
if self.base_class and self.base_class not in [base.__name__ for base in obj.__class__.__bases__]:
return None
return obj
def all(self, *args, **kwargs):
''' instantiates all plugins with the same arguments '''
for i in self._get_paths():
matches = glob.glob(os.path.join(i, "*.py"))
matches.sort()
for path in matches:
name, ext = os.path.splitext(os.path.basename(path))
if name.startswith("_"):
continue
if path not in self._module_cache:
self._module_cache[path] = imp.load_source('.'.join([self.package, name]), path)
if kwargs.get('class_only', False):
obj = getattr(self._module_cache[path], self.class_name)
else:
obj = getattr(self._module_cache[path], self.class_name)(*args, **kwargs)
if self.base_class and self.base_class not in [base.__name__ for base in obj.__class__.__bases__]:
continue
# set extra info on the module, in case we want it later
setattr(obj, '_original_path', path)
yield obj | identifier_body |
__init__.py | # (c) 2012, Daniel Hokka Zakrisson <daniel@hozac.com>
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com> and others
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import glob
import imp
import inspect
import os
import os.path
import sys
from ansible import constants as C
from ansible.utils.display import Display
from ansible import errors
MODULE_CACHE = {}
PATH_CACHE = {}
PLUGIN_PATH_CACHE = {}
_basedirs = []
def push_basedir(basedir):
# avoid pushing the same absolute dir more than once
basedir = os.path.realpath(basedir)
if basedir not in _basedirs:
_basedirs.insert(0, basedir)
def get_all_plugin_loaders():
return [(name, obj) for (name, obj) in inspect.getmembers(sys.modules[__name__]) if isinstance(obj, PluginLoader)]
class PluginLoader:
'''
PluginLoader loads plugins from the configured plugin directories.
It searches for plugins by iterating through the combined list of
play basedirs, configured paths, and the python path.
The first match is used.
'''
def __init__(self, class_name, package, config, subdir, aliases={}, required_base_class=None):
self.class_name = class_name
self.base_class = required_base_class
self.package = package
self.config = config
self.subdir = subdir
self.aliases = aliases
if not class_name in MODULE_CACHE:
MODULE_CACHE[class_name] = {}
if not class_name in PATH_CACHE:
PATH_CACHE[class_name] = None
if not class_name in PLUGIN_PATH_CACHE:
PLUGIN_PATH_CACHE[class_name] = {}
self._module_cache = MODULE_CACHE[class_name]
self._paths = PATH_CACHE[class_name]
self._plugin_path_cache = PLUGIN_PATH_CACHE[class_name]
self._extra_dirs = []
self._searched_paths = set()
def __setstate__(self, data):
'''
Deserializer.
'''
class_name = data.get('class_name')
package = data.get('package')
config = data.get('config')
subdir = data.get('subdir')
aliases = data.get('aliases')
base_class = data.get('base_class')
PATH_CACHE[class_name] = data.get('PATH_CACHE')
PLUGIN_PATH_CACHE[class_name] = data.get('PLUGIN_PATH_CACHE')
self.__init__(class_name, package, config, subdir, aliases, base_class)
self._extra_dirs = data.get('_extra_dirs', [])
self._searched_paths = data.get('_searched_paths', set())
def __getstate__(self):
'''
Serializer.
'''
return dict(
class_name = self.class_name,
base_class = self.base_class,
package = self.package,
config = self.config,
subdir = self.subdir,
aliases = self.aliases,
_extra_dirs = self._extra_dirs,
_searched_paths = self._searched_paths,
PATH_CACHE = PATH_CACHE[self.class_name],
PLUGIN_PATH_CACHE = PLUGIN_PATH_CACHE[self.class_name],
)
def print_paths(self):
''' Returns a string suitable for printing of the search path '''
# Uses a list to get the order right
ret = []
for i in self._get_paths():
if i not in ret:
ret.append(i)
return os.pathsep.join(ret)
def _all_directories(self, dir):
results = []
results.append(dir)
for root, subdirs, files in os.walk(dir):
if '__init__.py' in files:
for x in subdirs:
results.append(os.path.join(root,x))
return results
def _get_package_paths(self):
''' Gets the path of a Python package '''
paths = []
if not self.package:
return []
if not hasattr(self, 'package_path'):
m = __import__(self.package)
parts = self.package.split('.')[1:]
self.package_path = os.path.join(os.path.dirname(m.__file__), *parts)
paths.extend(self._all_directories(self.package_path))
return paths
def _get_paths(self):
''' Return a list of paths to search for plugins in '''
if self._paths is not None:
return self._paths
ret = self._extra_dirs[:]
for basedir in _basedirs:
fullpath = os.path.realpath(os.path.join(basedir, self.subdir))
if os.path.isdir(fullpath):
files = glob.glob("%s/*" % fullpath)
# allow directories to be two levels deep
files2 = glob.glob("%s/*/*" % fullpath)
if files2 is not None:
files.extend(files2)
for file in files:
if os.path.isdir(file) and file not in ret:
ret.append(file)
if fullpath not in ret:
ret.append(fullpath)
# look in any configured plugin paths, allow one level deep for subcategories
if self.config is not None:
configured_paths = self.config.split(os.pathsep)
for path in configured_paths:
path = os.path.realpath(os.path.expanduser(path))
contents = glob.glob("%s/*" % path) + glob.glob("%s/*/*" % path)
for c in contents:
if os.path.isdir(c) and c not in ret:
ret.append(c)
if path not in ret:
ret.append(path)
# look for any plugins installed in the package subtree
ret.extend(self._get_package_paths())
# cache and return the result
self._paths = ret
return ret
def add_directory(self, directory, with_subdir=False):
''' Adds an additional directory to the search path '''
directory = os.path.realpath(directory)
if directory is not None:
if with_subdir:
directory = os.path.join(directory, self.subdir)
if directory not in self._extra_dirs:
# append the directory and invalidate the path cache
self._extra_dirs.append(directory)
self._paths = None
def find_plugin(self, name, suffixes=None):
''' Find a plugin named name '''
if not suffixes:
if self.class_name:
suffixes = ['.py']
else:
suffixes = ['.py', '']
potential_names = frozenset('%s%s' % (name, s) for s in suffixes)
for full_name in potential_names:
if full_name in self._plugin_path_cache:
return self._plugin_path_cache[full_name]
found = None
for path in [p for p in self._get_paths() if p not in self._searched_paths]:
if os.path.isdir(path):
try:
full_paths = (os.path.join(path, f) for f in os.listdir(path))
except OSError as e:
d = Display()
d.warning("Error accessing plugin paths: %s" % str(e))
for full_path in (f for f in full_paths if os.path.isfile(f)):
for suffix in suffixes:
if full_path.endswith(suffix):
full_name = os.path.basename(full_path)
break
else: # Yes, this is a for-else: http://bit.ly/1ElPkyg
continue
if full_name not in self._plugin_path_cache:
self._plugin_path_cache[full_name] = full_path
self._searched_paths.add(path)
for full_name in potential_names:
if full_name in self._plugin_path_cache:
return self._plugin_path_cache[full_name]
# if nothing is found, try finding alias/deprecated
if not name.startswith('_'):
for alias_name in ('_%s' % n for n in potential_names):
# We've already cached all the paths at this point
if alias_name in self._plugin_path_cache:
return self._plugin_path_cache[alias_name]
return None
def has_plugin(self, name):
''' Checks if a plugin named name exists '''
return self.find_plugin(name) is not None
__contains__ = has_plugin
def get(self, name, *args, **kwargs):
''' instantiates a plugin of the given name using arguments '''
if name in self.aliases:
name = self.aliases[name]
path = self.find_plugin(name)
if path is None:
return None
if path not in self._module_cache:
self._module_cache[path] = imp.load_source('.'.join([self.package, name]), path)
if kwargs.get('class_only', False):
obj = getattr(self._module_cache[path], self.class_name)
else:
obj = getattr(self._module_cache[path], self.class_name)(*args, **kwargs)
if self.base_class and self.base_class not in [base.__name__ for base in obj.__class__.__bases__]:
return None
return obj
def all(self, *args, **kwargs):
''' instantiates all plugins with the same arguments '''
for i in self._get_paths():
|
action_loader = PluginLoader(
'ActionModule',
'ansible.plugins.action',
C.DEFAULT_ACTION_PLUGIN_PATH,
'action_plugins',
required_base_class='ActionBase',
)
cache_loader = PluginLoader(
'CacheModule',
'ansible.plugins.cache',
C.DEFAULT_CACHE_PLUGIN_PATH,
'cache_plugins',
)
callback_loader = PluginLoader(
'CallbackModule',
'ansible.plugins.callback',
C.DEFAULT_CALLBACK_PLUGIN_PATH,
'callback_plugins',
)
connection_loader = PluginLoader(
'Connection',
'ansible.plugins.connections',
C.DEFAULT_CONNECTION_PLUGIN_PATH,
'connection_plugins',
aliases={'paramiko': 'paramiko_ssh'},
required_base_class='ConnectionBase',
)
shell_loader = PluginLoader(
'ShellModule',
'ansible.plugins.shell',
'shell_plugins',
'shell_plugins',
)
module_loader = PluginLoader(
'',
'ansible.modules',
C.DEFAULT_MODULE_PATH,
'library',
)
lookup_loader = PluginLoader(
'LookupModule',
'ansible.plugins.lookup',
C.DEFAULT_LOOKUP_PLUGIN_PATH,
'lookup_plugins',
required_base_class='LookupBase',
)
vars_loader = PluginLoader(
'VarsModule',
'ansible.plugins.vars',
C.DEFAULT_VARS_PLUGIN_PATH,
'vars_plugins',
)
filter_loader = PluginLoader(
'FilterModule',
'ansible.plugins.filter',
C.DEFAULT_FILTER_PLUGIN_PATH,
'filter_plugins',
)
fragment_loader = PluginLoader(
'ModuleDocFragment',
'ansible.utils.module_docs_fragments',
os.path.join(os.path.dirname(__file__), 'module_docs_fragments'),
'',
)
strategy_loader = PluginLoader(
'StrategyModule',
'ansible.plugins.strategies',
None,
'strategy_plugins',
required_base_class='StrategyBase',
)
| matches = glob.glob(os.path.join(i, "*.py"))
matches.sort()
for path in matches:
name, ext = os.path.splitext(os.path.basename(path))
if name.startswith("_"):
continue
if path not in self._module_cache:
self._module_cache[path] = imp.load_source('.'.join([self.package, name]), path)
if kwargs.get('class_only', False):
obj = getattr(self._module_cache[path], self.class_name)
else:
obj = getattr(self._module_cache[path], self.class_name)(*args, **kwargs)
if self.base_class and self.base_class not in [base.__name__ for base in obj.__class__.__bases__]:
continue
# set extra info on the module, in case we want it later
setattr(obj, '_original_path', path)
yield obj | conditional_block |
breakpoint.py | '''
Created on Jun 11, 2011
@author: mkiyer
'''
class | (object):
def __init__(self):
self.name = None
self.seq5p = None
self.seq3p = None
self.chimera_names = []
@property
def pos(self):
"""
return position of break along sequence measured from 5' -> 3'
"""
return len(self.seq5p)
@staticmethod
def from_list(fields):
b = Breakpoint()
b.name = fields[0]
b.seq5p = fields[1]
b.seq3p = fields[2]
b.chimera_names = fields[3].split(',')
return b
def to_list(self):
fields = [self.name, self.seq5p, self.seq3p]
fields.append(','.join(self.chimera_names))
return fields | Breakpoint | identifier_name |
breakpoint.py | '''
Created on Jun 11, 2011 | @author: mkiyer
'''
class Breakpoint(object):
def __init__(self):
self.name = None
self.seq5p = None
self.seq3p = None
self.chimera_names = []
@property
def pos(self):
"""
return position of break along sequence measured from 5' -> 3'
"""
return len(self.seq5p)
@staticmethod
def from_list(fields):
b = Breakpoint()
b.name = fields[0]
b.seq5p = fields[1]
b.seq3p = fields[2]
b.chimera_names = fields[3].split(',')
return b
def to_list(self):
fields = [self.name, self.seq5p, self.seq3p]
fields.append(','.join(self.chimera_names))
return fields | random_line_split | |
breakpoint.py | '''
Created on Jun 11, 2011
@author: mkiyer
'''
class Breakpoint(object):
def __init__(self):
self.name = None
self.seq5p = None
self.seq3p = None
self.chimera_names = []
@property
def pos(self):
"""
return position of break along sequence measured from 5' -> 3'
"""
return len(self.seq5p)
@staticmethod
def from_list(fields):
|
def to_list(self):
fields = [self.name, self.seq5p, self.seq3p]
fields.append(','.join(self.chimera_names))
return fields | b = Breakpoint()
b.name = fields[0]
b.seq5p = fields[1]
b.seq3p = fields[2]
b.chimera_names = fields[3].split(',')
return b | identifier_body |
promoted_errors.rs | // http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![warn(const_err)]
// compile-pass
// compile-flags: -O
fn main() {
println!("{}", 0u32 - 1);
let _x = 0u32 - 1;
//~^ WARN const_err
println!("{}", 1/(1-1));
//~^ WARN const_err
let _x = 1/(1-1);
//~^ WARN const_err
//~| WARN const_err
println!("{}", 1/(false as u32));
//~^ WARN const_err
let _x = 1/(false as u32);
//~^ WARN const_err
//~| WARN const_err
} | // Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at | random_line_split | |
promoted_errors.rs | // Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![warn(const_err)]
// compile-pass
// compile-flags: -O
fn main() | {
println!("{}", 0u32 - 1);
let _x = 0u32 - 1;
//~^ WARN const_err
println!("{}", 1/(1-1));
//~^ WARN const_err
let _x = 1/(1-1);
//~^ WARN const_err
//~| WARN const_err
println!("{}", 1/(false as u32));
//~^ WARN const_err
let _x = 1/(false as u32);
//~^ WARN const_err
//~| WARN const_err
} | identifier_body | |
promoted_errors.rs | // Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![warn(const_err)]
// compile-pass
// compile-flags: -O
fn | () {
println!("{}", 0u32 - 1);
let _x = 0u32 - 1;
//~^ WARN const_err
println!("{}", 1/(1-1));
//~^ WARN const_err
let _x = 1/(1-1);
//~^ WARN const_err
//~| WARN const_err
println!("{}", 1/(false as u32));
//~^ WARN const_err
let _x = 1/(false as u32);
//~^ WARN const_err
//~| WARN const_err
}
| main | identifier_name |
lib.rs | extern crate yassy;
extern crate gnuplot;
use yassy::utils;
use yassy::utils::*;
use self::gnuplot::*;
pub fn | (nt: usize, nppt: usize, nn: usize, fs: f64, fhabs: &[f64], outname: &str) {
// The axis of fhabs has nn/2+1 points, representing frequencies from 0 to fl/2,
// or i*(fl/2)/(nn/2) = i*fl/nn = i*fs*(nppt-1)/nn for i=0..nn/2. (Because
// fl=1/Tl=fs*(nppt-1)) We are only interested in
// frequencies up to around fi=60KHz, or i= 60KHz*nn/(fs*(nppt-1)).
let npptf64=nppt as f64;
let ntf64=nt as f64;
// Find index such that the horizontal axis of the plot is fmax, i.e.
// i = fmax*nn/(fs*(nppt-1))
let fac = (nn as f64)/(fs*(npptf64-1f64));
let i_fi = (60000f64*fac).round();
println!("fac: {}", fac);
println!("i_fi: {}", i_fi);
let mut f = vec![0f64; nn/2+1];
// display kHz in plot
utils::linspace(&mut f, 0f64, ((nn/2+1) as f64)/fac/1000f64);
let f_cut = &f[..i_fi as usize];
let fhabs_cut = &fhabs[..i_fi as usize];
let mut fg = gnuplot::Figure::new();
fg.set_terminal("svg", outname);
// yticks
let yticks = [0.00001f64,0.0001f64,0.001f64,0.01f64,0.1f64,1f64];
fg.axes2d()
.set_y_log(Some(10f64))
.lines(f_cut.iter(), fhabs_cut.iter(), &[Color("blue")])
.lines(&[20f64,20f64], &[0f64, 1f64], &[Color("green")])
.lines(&[fs/1000f64,fs/1000f64], &[0f64, 1f64], &[Color("red")])
.lines(&[fs/1000f64-20f64,fs/1000f64-20f64], &[0f64, 1f64], &[Color("red")])
.set_y_range(Fix(0.00001f64), Fix(1f64))
.set_y_ticks_custom(yticks.iter().map(|x| Major(*x as f64, Fix("10^{%T}".to_string()))),&[],&[])
.set_x_label("Frequency in kHz",&[])
.set_title("Amplitude spectrum",&[]);
fg.show();
}
| plot_ampl_spec | identifier_name |
lib.rs | extern crate yassy;
extern crate gnuplot;
use yassy::utils;
use yassy::utils::*;
use self::gnuplot::*;
pub fn plot_ampl_spec(nt: usize, nppt: usize, nn: usize, fs: f64, fhabs: &[f64], outname: &str) | {
// The axis of fhabs has nn/2+1 points, representing frequencies from 0 to fl/2,
// or i*(fl/2)/(nn/2) = i*fl/nn = i*fs*(nppt-1)/nn for i=0..nn/2. (Because
// fl=1/Tl=fs*(nppt-1)) We are only interested in
// frequencies up to around fi=60KHz, or i= 60KHz*nn/(fs*(nppt-1)).
let npptf64=nppt as f64;
let ntf64=nt as f64;
// Find index such that the horizontal axis of the plot is fmax, i.e.
// i = fmax*nn/(fs*(nppt-1))
let fac = (nn as f64)/(fs*(npptf64-1f64));
let i_fi = (60000f64*fac).round();
println!("fac: {}", fac);
println!("i_fi: {}", i_fi);
let mut f = vec![0f64; nn/2+1];
// display kHz in plot
utils::linspace(&mut f, 0f64, ((nn/2+1) as f64)/fac/1000f64);
let f_cut = &f[..i_fi as usize];
let fhabs_cut = &fhabs[..i_fi as usize];
let mut fg = gnuplot::Figure::new();
fg.set_terminal("svg", outname);
// yticks
let yticks = [0.00001f64,0.0001f64,0.001f64,0.01f64,0.1f64,1f64];
fg.axes2d()
.set_y_log(Some(10f64))
.lines(f_cut.iter(), fhabs_cut.iter(), &[Color("blue")])
.lines(&[20f64,20f64], &[0f64, 1f64], &[Color("green")])
.lines(&[fs/1000f64,fs/1000f64], &[0f64, 1f64], &[Color("red")])
.lines(&[fs/1000f64-20f64,fs/1000f64-20f64], &[0f64, 1f64], &[Color("red")])
.set_y_range(Fix(0.00001f64), Fix(1f64))
.set_y_ticks_custom(yticks.iter().map(|x| Major(*x as f64, Fix("10^{%T}".to_string()))),&[],&[])
.set_x_label("Frequency in kHz",&[])
.set_title("Amplitude spectrum",&[]);
fg.show();
} | identifier_body | |
lib.rs | extern crate yassy;
extern crate gnuplot;
use yassy::utils;
use yassy::utils::*;
use self::gnuplot::*;
pub fn plot_ampl_spec(nt: usize, nppt: usize, nn: usize, fs: f64, fhabs: &[f64], outname: &str) {
// The axis of fhabs has nn/2+1 points, representing frequencies from 0 to fl/2,
// or i*(fl/2)/(nn/2) = i*fl/nn = i*fs*(nppt-1)/nn for i=0..nn/2. (Because
// fl=1/Tl=fs*(nppt-1)) We are only interested in | let ntf64=nt as f64;
// Find index such that the horizontal axis of the plot is fmax, i.e.
// i = fmax*nn/(fs*(nppt-1))
let fac = (nn as f64)/(fs*(npptf64-1f64));
let i_fi = (60000f64*fac).round();
println!("fac: {}", fac);
println!("i_fi: {}", i_fi);
let mut f = vec![0f64; nn/2+1];
// display kHz in plot
utils::linspace(&mut f, 0f64, ((nn/2+1) as f64)/fac/1000f64);
let f_cut = &f[..i_fi as usize];
let fhabs_cut = &fhabs[..i_fi as usize];
let mut fg = gnuplot::Figure::new();
fg.set_terminal("svg", outname);
// yticks
let yticks = [0.00001f64,0.0001f64,0.001f64,0.01f64,0.1f64,1f64];
fg.axes2d()
.set_y_log(Some(10f64))
.lines(f_cut.iter(), fhabs_cut.iter(), &[Color("blue")])
.lines(&[20f64,20f64], &[0f64, 1f64], &[Color("green")])
.lines(&[fs/1000f64,fs/1000f64], &[0f64, 1f64], &[Color("red")])
.lines(&[fs/1000f64-20f64,fs/1000f64-20f64], &[0f64, 1f64], &[Color("red")])
.set_y_range(Fix(0.00001f64), Fix(1f64))
.set_y_ticks_custom(yticks.iter().map(|x| Major(*x as f64, Fix("10^{%T}".to_string()))),&[],&[])
.set_x_label("Frequency in kHz",&[])
.set_title("Amplitude spectrum",&[]);
fg.show();
} | // frequencies up to around fi=60KHz, or i= 60KHz*nn/(fs*(nppt-1)).
let npptf64=nppt as f64; | random_line_split |
log.py | """
Logger classes for the ZAP CLI.
.. moduleauthor:: Daniel Grunwell (grunny)
"""
import logging
import sys
from termcolor import colored
class ColorStreamHandler(logging.StreamHandler):
"""
StreamHandler that prints color. This is used by the console client.
"""
level_map = {
logging.DEBUG: ('magenta', ['bold']),
logging.INFO: ('cyan', ['bold']),
logging.WARNING: ('yellow', ['bold']),
logging.ERROR: ('red', ['bold']),
logging.CRITICAL: ('red', ['bold', 'reverse'])
}
@property
def is_tty(self):
"""is the stream a tty?"""
isatty = getattr(self.stream, 'isatty', None)
return isatty and isatty()
def emit(self, record):
colorize = 'console' in globals() and getattr(console, 'colorize', False)
if self.is_tty and colorize:
color, attr = self.level_map[record.levelno]
prefix = colored(str('[' + record.levelname + ']').ljust(18), color, attrs=attr)
if hasattr(record, 'highlight') and record.highlight:
record.msg = colored(record.msg, color, attrs=['bold', 'reverse'])
else:
prefix = str('[' + record.levelname + ']').ljust(18)
record.msg = prefix + record.msg
logging.StreamHandler.emit(self, record)
class ConsoleLogger(logging.Logger):
"""Log to the console with some color decorations."""
def | (self, name):
super(ConsoleLogger, self).__init__(name)
self.setLevel(logging.DEBUG)
self.addHandler(ColorStreamHandler(sys.stdout))
# Save the current logger
default_logger_class = logging.getLoggerClass()
# Console logging for CLI
logging.setLoggerClass(ConsoleLogger)
console = logging.getLogger('zap')
# Restore the previous logger
logging.setLoggerClass(default_logger_class)
| __init__ | identifier_name |
log.py | """
Logger classes for the ZAP CLI.
.. moduleauthor:: Daniel Grunwell (grunny)
"""
import logging
import sys
from termcolor import colored
class ColorStreamHandler(logging.StreamHandler):
"""
StreamHandler that prints color. This is used by the console client.
"""
level_map = {
logging.DEBUG: ('magenta', ['bold']),
logging.INFO: ('cyan', ['bold']),
logging.WARNING: ('yellow', ['bold']),
logging.ERROR: ('red', ['bold']),
logging.CRITICAL: ('red', ['bold', 'reverse'])
}
@property
def is_tty(self):
"""is the stream a tty?"""
isatty = getattr(self.stream, 'isatty', None)
return isatty and isatty()
def emit(self, record):
colorize = 'console' in globals() and getattr(console, 'colorize', False)
if self.is_tty and colorize:
|
else:
prefix = str('[' + record.levelname + ']').ljust(18)
record.msg = prefix + record.msg
logging.StreamHandler.emit(self, record)
class ConsoleLogger(logging.Logger):
"""Log to the console with some color decorations."""
def __init__(self, name):
super(ConsoleLogger, self).__init__(name)
self.setLevel(logging.DEBUG)
self.addHandler(ColorStreamHandler(sys.stdout))
# Save the current logger
default_logger_class = logging.getLoggerClass()
# Console logging for CLI
logging.setLoggerClass(ConsoleLogger)
console = logging.getLogger('zap')
# Restore the previous logger
logging.setLoggerClass(default_logger_class)
| color, attr = self.level_map[record.levelno]
prefix = colored(str('[' + record.levelname + ']').ljust(18), color, attrs=attr)
if hasattr(record, 'highlight') and record.highlight:
record.msg = colored(record.msg, color, attrs=['bold', 'reverse']) | conditional_block |
log.py | """
Logger classes for the ZAP CLI.
.. moduleauthor:: Daniel Grunwell (grunny)
"""
import logging
import sys
from termcolor import colored
class ColorStreamHandler(logging.StreamHandler):
"""
StreamHandler that prints color. This is used by the console client.
"""
level_map = {
logging.DEBUG: ('magenta', ['bold']),
logging.INFO: ('cyan', ['bold']),
logging.WARNING: ('yellow', ['bold']),
logging.ERROR: ('red', ['bold']),
logging.CRITICAL: ('red', ['bold', 'reverse'])
}
@property
def is_tty(self):
|
def emit(self, record):
colorize = 'console' in globals() and getattr(console, 'colorize', False)
if self.is_tty and colorize:
color, attr = self.level_map[record.levelno]
prefix = colored(str('[' + record.levelname + ']').ljust(18), color, attrs=attr)
if hasattr(record, 'highlight') and record.highlight:
record.msg = colored(record.msg, color, attrs=['bold', 'reverse'])
else:
prefix = str('[' + record.levelname + ']').ljust(18)
record.msg = prefix + record.msg
logging.StreamHandler.emit(self, record)
class ConsoleLogger(logging.Logger):
"""Log to the console with some color decorations."""
def __init__(self, name):
super(ConsoleLogger, self).__init__(name)
self.setLevel(logging.DEBUG)
self.addHandler(ColorStreamHandler(sys.stdout))
# Save the current logger
default_logger_class = logging.getLoggerClass()
# Console logging for CLI
logging.setLoggerClass(ConsoleLogger)
console = logging.getLogger('zap')
# Restore the previous logger
logging.setLoggerClass(default_logger_class)
| """is the stream a tty?"""
isatty = getattr(self.stream, 'isatty', None)
return isatty and isatty() | identifier_body |
log.py | """
Logger classes for the ZAP CLI.
.. moduleauthor:: Daniel Grunwell (grunny)
"""
import logging
import sys
from termcolor import colored
class ColorStreamHandler(logging.StreamHandler):
"""
StreamHandler that prints color. This is used by the console client.
"""
level_map = {
logging.DEBUG: ('magenta', ['bold']),
logging.INFO: ('cyan', ['bold']),
logging.WARNING: ('yellow', ['bold']),
logging.ERROR: ('red', ['bold']),
logging.CRITICAL: ('red', ['bold', 'reverse'])
}
@property
def is_tty(self):
"""is the stream a tty?"""
isatty = getattr(self.stream, 'isatty', None)
return isatty and isatty()
def emit(self, record):
colorize = 'console' in globals() and getattr(console, 'colorize', False)
if self.is_tty and colorize:
color, attr = self.level_map[record.levelno]
prefix = colored(str('[' + record.levelname + ']').ljust(18), color, attrs=attr)
if hasattr(record, 'highlight') and record.highlight:
record.msg = colored(record.msg, color, attrs=['bold', 'reverse'])
else:
prefix = str('[' + record.levelname + ']').ljust(18)
record.msg = prefix + record.msg
logging.StreamHandler.emit(self, record)
class ConsoleLogger(logging.Logger):
"""Log to the console with some color decorations."""
def __init__(self, name):
super(ConsoleLogger, self).__init__(name) | # Save the current logger
default_logger_class = logging.getLoggerClass()
# Console logging for CLI
logging.setLoggerClass(ConsoleLogger)
console = logging.getLogger('zap')
# Restore the previous logger
logging.setLoggerClass(default_logger_class) | self.setLevel(logging.DEBUG)
self.addHandler(ColorStreamHandler(sys.stdout))
| random_line_split |
SendTestEmail.controller.js | angular.module("umbraco").controller("SendTestEmailController.Controller", function ($scope, assetsService, $http, $routeParams, notificationsService) {
$scope.loading = false;
$scope.init = function () {
if ($scope.model.value == null || $scope.model.value == "") {
// Set default if nothing was set yet
$scope.model.value = {
recipients: [{}],
tags: [{}],
}
} else if ($scope.model.value.constructor.name === 'Array') {
// For old versions, $scope.model.value was the array of tags.
// We have made $scope.model.value into an object, with a key 'tags', among others.
// If we see an old model.value (an Array), transform it into the new format
var tags = $scope.model.value;
$scope.model.value = {
recipients: [{}],
tags: tags,
}; | $scope.sendTestEmail = function () {
$scope.loading = true;
if ($scope.model.value.recipient != "") {
$http.post("/base/PerplexMail/SendTestMail?id=" + $routeParams.id, { EmailAddresses: _.pluck($scope.model.value.recipients, 'value'), EmailNodeId: $routeParams.id, Tags: $scope.model.value.tags })
.then(function (response) {
// Notify the user
notificationsService.add({ type: (response.data.Success ? "success" : "error"), headline: response.data.Message });
$scope.loading = false;
}), function (err) {
//display the error
notificationsService.error(err.errorMsg);
$scope.loading = false;
};
}
}
$scope.addRecipient = function () {
$scope.model.value.recipients.push({});
}
$scope.removeRecipient = function (index) {
// Laatste element gaan we niet verwijderen, maar leegmaken
if ($scope.model.value.recipients.length === 1) {
$scope.model.value.recipients = [{}];
} else {
$scope.model.value.recipients.splice(index, 1);
}
}
$scope.addTag = function () {
// Add an empty value
$scope.model.value.tags.push({});
};
$scope.removeTag = function (index) {
// Laatste element gaan we niet verwijderen, maar leegmaken
if ($scope.model.value.tags.length === 1) {
$scope.model.value.tags = [{}];
} else {
$scope.model.value.tags.splice(index, 1);
}
};
}); | }
};
| random_line_split |
karma.conf.js | // Karma configuration
// Generated on Wed Sep 21 2016 00:37:04 GMT+0900 (KST)
module.exports = function (config) {
var configuration = {
// base path that will be used to resolve all patterns (eg. files, exclude)
basePath: '',
// frameworks to use
// available frameworks: https://npmjs.org/browse/keyword/karma-adapter
frameworks: ['mocha'],
// list of files / patterns to load in the browser
files: [
'dist/ax5core.js',
'karma.test.js'
],
// list of files to exclude
exclude: [],
// preprocess matching files before serving them to the browser
// available preprocessors: https://npmjs.org/browse/keyword/karma-preprocessor
preprocessors: {},
// test results reporter to use
// possible values: 'dots', 'progress'
// available reporters: https://npmjs.org/browse/keyword/karma-reporter
reporters: ['progress'],
// web server port
port: 9876,
// enable / disable colors in the output (reporters and logs)
colors: true,
// level of logging
// possible values: config.LOG_DISABLE || config.LOG_ERROR || config.LOG_WARN || config.LOG_INFO || config.LOG_DEBUG
logLevel: config.LOG_INFO,
// enable / disable watching file and executing tests whenever any file changes
autoWatch: true,
// start these browsers
// available browser launchers: https://npmjs.org/browse/keyword/karma-launcher
browsers: ['Chrome', 'Firefox', 'Safari', 'IE'],
customLaunchers: {
Chrome_travis_ci: {
base: 'Chrome',
flags: ['--no-sandbox']
}
},
singleRun: true,
concurrency: Infinity
};
if (process.env.TRAVIS) |
config.set()
}
| {
configuration.browsers = ['Chrome_travis_ci'];
} | conditional_block |
karma.conf.js | // Karma configuration
// Generated on Wed Sep 21 2016 00:37:04 GMT+0900 (KST)
module.exports = function (config) {
var configuration = {
// base path that will be used to resolve all patterns (eg. files, exclude)
basePath: '',
// frameworks to use
// available frameworks: https://npmjs.org/browse/keyword/karma-adapter
frameworks: ['mocha'],
// list of files / patterns to load in the browser
files: [
'dist/ax5core.js',
'karma.test.js'
],
// list of files to exclude
exclude: [],
// preprocess matching files before serving them to the browser
// available preprocessors: https://npmjs.org/browse/keyword/karma-preprocessor
preprocessors: {},
// test results reporter to use
// possible values: 'dots', 'progress'
// available reporters: https://npmjs.org/browse/keyword/karma-reporter
reporters: ['progress'],
// web server port
port: 9876,
// enable / disable colors in the output (reporters and logs)
colors: true,
// level of logging
// possible values: config.LOG_DISABLE || config.LOG_ERROR || config.LOG_WARN || config.LOG_INFO || config.LOG_DEBUG
logLevel: config.LOG_INFO,
// enable / disable watching file and executing tests whenever any file changes
autoWatch: true,
// start these browsers
// available browser launchers: https://npmjs.org/browse/keyword/karma-launcher
browsers: ['Chrome', 'Firefox', 'Safari', 'IE'],
customLaunchers: {
Chrome_travis_ci: {
base: 'Chrome',
flags: ['--no-sandbox']
}
},
singleRun: true,
concurrency: Infinity
};
| } | if (process.env.TRAVIS) {
configuration.browsers = ['Chrome_travis_ci'];
}
config.set() | random_line_split |
__init__.py | #-*- coding: utf-8 -*-
# Author: Matt Earnshaw <matt@earnshaw.org.uk>
from __future__ import absolute_import | from sunpy.io import UnrecognizedFileTypeError
class Plotman(object):
""" Wraps a MainWindow so PlotMan instances can be created via the CLI.
Examples
--------
from sunpy.gui import Plotman
plots = Plotman("data/examples")
plots.show()
"""
def __init__(self, *paths):
""" *paths: directories containing FITS paths
or FITS paths to be opened in PlotMan """
self.app = QApplication(sys.argv)
self.main = MainWindow()
self.open_files(paths)
def open_files(self, inputs):
VALID_EXTENSIONS = [".jp2", ".fits", ".fts"]
to_open = []
# Determine files to process
for input_ in inputs:
if os.path.isfile(input_):
to_open.append(input_)
elif os.path.isdir(input_):
for file_ in os.listdir(input_):
to_open.append(file_)
else:
raise IOError("Path " + input_ + " does not exist.")
# Load files
for filepath in to_open:
name, ext = os.path.splitext(filepath) #pylint: disable=W0612
if ext.lower() in VALID_EXTENSIONS:
try:
self.main.add_tab(filepath, os.path.basename(filepath))
except UnrecognizedFileTypeError:
pass
def show(self):
self.main.show()
self.app.exec_()
if __name__=="__main__":
from sunpy.gui import Plotman
plots = Plotman(sunpy.AIA_171_IMAGE)
plots.show() | import os
import sys
import sunpy
from PyQt4.QtGui import QApplication
from sunpy.gui.mainwindow import MainWindow | random_line_split |
__init__.py | #-*- coding: utf-8 -*-
# Author: Matt Earnshaw <matt@earnshaw.org.uk>
from __future__ import absolute_import
import os
import sys
import sunpy
from PyQt4.QtGui import QApplication
from sunpy.gui.mainwindow import MainWindow
from sunpy.io import UnrecognizedFileTypeError
class Plotman(object):
""" Wraps a MainWindow so PlotMan instances can be created via the CLI.
Examples
--------
from sunpy.gui import Plotman
plots = Plotman("data/examples")
plots.show()
"""
def __init__(self, *paths):
|
def open_files(self, inputs):
VALID_EXTENSIONS = [".jp2", ".fits", ".fts"]
to_open = []
# Determine files to process
for input_ in inputs:
if os.path.isfile(input_):
to_open.append(input_)
elif os.path.isdir(input_):
for file_ in os.listdir(input_):
to_open.append(file_)
else:
raise IOError("Path " + input_ + " does not exist.")
# Load files
for filepath in to_open:
name, ext = os.path.splitext(filepath) #pylint: disable=W0612
if ext.lower() in VALID_EXTENSIONS:
try:
self.main.add_tab(filepath, os.path.basename(filepath))
except UnrecognizedFileTypeError:
pass
def show(self):
self.main.show()
self.app.exec_()
if __name__=="__main__":
from sunpy.gui import Plotman
plots = Plotman(sunpy.AIA_171_IMAGE)
plots.show()
| """ *paths: directories containing FITS paths
or FITS paths to be opened in PlotMan """
self.app = QApplication(sys.argv)
self.main = MainWindow()
self.open_files(paths) | identifier_body |
__init__.py | #-*- coding: utf-8 -*-
# Author: Matt Earnshaw <matt@earnshaw.org.uk>
from __future__ import absolute_import
import os
import sys
import sunpy
from PyQt4.QtGui import QApplication
from sunpy.gui.mainwindow import MainWindow
from sunpy.io import UnrecognizedFileTypeError
class Plotman(object):
""" Wraps a MainWindow so PlotMan instances can be created via the CLI.
Examples
--------
from sunpy.gui import Plotman
plots = Plotman("data/examples")
plots.show()
"""
def __init__(self, *paths):
""" *paths: directories containing FITS paths
or FITS paths to be opened in PlotMan """
self.app = QApplication(sys.argv)
self.main = MainWindow()
self.open_files(paths)
def open_files(self, inputs):
VALID_EXTENSIONS = [".jp2", ".fits", ".fts"]
to_open = []
# Determine files to process
for input_ in inputs:
if os.path.isfile(input_):
|
elif os.path.isdir(input_):
for file_ in os.listdir(input_):
to_open.append(file_)
else:
raise IOError("Path " + input_ + " does not exist.")
# Load files
for filepath in to_open:
name, ext = os.path.splitext(filepath) #pylint: disable=W0612
if ext.lower() in VALID_EXTENSIONS:
try:
self.main.add_tab(filepath, os.path.basename(filepath))
except UnrecognizedFileTypeError:
pass
def show(self):
self.main.show()
self.app.exec_()
if __name__=="__main__":
from sunpy.gui import Plotman
plots = Plotman(sunpy.AIA_171_IMAGE)
plots.show()
| to_open.append(input_) | conditional_block |
__init__.py | #-*- coding: utf-8 -*-
# Author: Matt Earnshaw <matt@earnshaw.org.uk>
from __future__ import absolute_import
import os
import sys
import sunpy
from PyQt4.QtGui import QApplication
from sunpy.gui.mainwindow import MainWindow
from sunpy.io import UnrecognizedFileTypeError
class Plotman(object):
""" Wraps a MainWindow so PlotMan instances can be created via the CLI.
Examples
--------
from sunpy.gui import Plotman
plots = Plotman("data/examples")
plots.show()
"""
def __init__(self, *paths):
""" *paths: directories containing FITS paths
or FITS paths to be opened in PlotMan """
self.app = QApplication(sys.argv)
self.main = MainWindow()
self.open_files(paths)
def open_files(self, inputs):
VALID_EXTENSIONS = [".jp2", ".fits", ".fts"]
to_open = []
# Determine files to process
for input_ in inputs:
if os.path.isfile(input_):
to_open.append(input_)
elif os.path.isdir(input_):
for file_ in os.listdir(input_):
to_open.append(file_)
else:
raise IOError("Path " + input_ + " does not exist.")
# Load files
for filepath in to_open:
name, ext = os.path.splitext(filepath) #pylint: disable=W0612
if ext.lower() in VALID_EXTENSIONS:
try:
self.main.add_tab(filepath, os.path.basename(filepath))
except UnrecognizedFileTypeError:
pass
def | (self):
self.main.show()
self.app.exec_()
if __name__=="__main__":
from sunpy.gui import Plotman
plots = Plotman(sunpy.AIA_171_IMAGE)
plots.show()
| show | identifier_name |
nodejs.py | #!/usr/bin/env python
# Copyright 2016 DIANA-HEP
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import subprocess
import tempfile
import os
def write(vegaSpec, outputFile, format=None):
"""Use the 'vega' package in Nodejs to write to SVG or PNG files.
Unlike interactive plotting, this does not require a round trip through a web browser, but it does require a
Nodejs installation on your computer (to evaluate the Javascript).
To install the prerequisites on an Ubuntu system, do
# Cairo dependencies for generating PNG:
sudo apt-get install install libcairo2-dev libjpeg-dev libgif-dev libpango1.0-dev build-essential g++
# Nodejs and its package manager, npm:
sudo apt-get install npm
# Get the 'vega' package with npm; user-install, not global (no sudo)!
npm install vega
Parameters:
vegaSpec (string or dict): JSON string or its dict-of-dicts equivalent
outputFile (string or None): output file name or None to return output as a string
format ('svg', 'png', or None): None (default) guesses format from outputFile extension
"""
if format is None and outputFile is None:
format = "svg"
elif format is None and outputFile.endswith(".svg"):
format = "svg"
elif format is None and outputFile.endswith(".png"):
format = "png"
else:
raise IOError("Could not infer format from outputFile")
if format == "png":
cmd = "vg2png"
elif format == "svg":
cmd = "vg2svg"
else:
raise IOError("Only 'png' and 'svg' output is supported.")
npmbin = subprocess.Popen(["npm", "bin"], stdout=subprocess.PIPE)
if npmbin.wait() == 0:
npmbin = npmbin.stdout.read().strip()
else:
|
tmp = tempfile.NamedTemporaryFile(delete=False)
if isinstance(vegaSpec, dict):
vegaSpec = json.dump(tmp, vegaSpec)
else:
tmp.write(vegaSpec)
tmp.close()
if outputFile is None:
vg2x = subprocess.Popen([cmd, tmp.name], stdout=subprocess.PIPE, env=dict(
os.environ, PATH=npmbin + ":" + os.environ.get("PATH", "")))
if vg2x.wait() == 0:
return vg2x.stdout.read()
else:
os.unlink(tmp.name)
raise IOError("Command '{0}' failed; if it's not installed, install it with 'npm install vega'".format(cmd))
else:
vg2x = subprocess.Popen([cmd, tmp.name, outputFile], stdout=subprocess.PIPE,
env=dict(os.environ, PATH=npmbin + ":" + os.environ.get("PATH", "")))
if vg2x.wait() != 0:
os.unlink(tmp.name)
raise IOError("Command '{0}' failed; if it's not installed, install it with 'npm install vega'".format(cmd))
| raise IOError("Nodejs Package Manager 'npm' must be installed to use nodejs.write function.") | conditional_block |
nodejs.py | #!/usr/bin/env python
# Copyright 2016 DIANA-HEP
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import subprocess
import tempfile
import os
def write(vegaSpec, outputFile, format=None):
"""Use the 'vega' package in Nodejs to write to SVG or PNG files.
Unlike interactive plotting, this does not require a round trip through a web browser, but it does require a
Nodejs installation on your computer (to evaluate the Javascript).
To install the prerequisites on an Ubuntu system, do
# Cairo dependencies for generating PNG:
sudo apt-get install install libcairo2-dev libjpeg-dev libgif-dev libpango1.0-dev build-essential g++
# Nodejs and its package manager, npm:
sudo apt-get install npm
# Get the 'vega' package with npm; user-install, not global (no sudo)!
npm install vega
Parameters:
vegaSpec (string or dict): JSON string or its dict-of-dicts equivalent
outputFile (string or None): output file name or None to return output as a string
format ('svg', 'png', or None): None (default) guesses format from outputFile extension
"""
if format is None and outputFile is None:
format = "svg"
elif format is None and outputFile.endswith(".svg"):
format = "svg"
elif format is None and outputFile.endswith(".png"):
format = "png"
else:
raise IOError("Could not infer format from outputFile")
if format == "png":
cmd = "vg2png"
elif format == "svg":
cmd = "vg2svg"
else:
raise IOError("Only 'png' and 'svg' output is supported.")
npmbin = subprocess.Popen(["npm", "bin"], stdout=subprocess.PIPE)
if npmbin.wait() == 0:
npmbin = npmbin.stdout.read().strip()
else:
raise IOError("Nodejs Package Manager 'npm' must be installed to use nodejs.write function.") | if isinstance(vegaSpec, dict):
vegaSpec = json.dump(tmp, vegaSpec)
else:
tmp.write(vegaSpec)
tmp.close()
if outputFile is None:
vg2x = subprocess.Popen([cmd, tmp.name], stdout=subprocess.PIPE, env=dict(
os.environ, PATH=npmbin + ":" + os.environ.get("PATH", "")))
if vg2x.wait() == 0:
return vg2x.stdout.read()
else:
os.unlink(tmp.name)
raise IOError("Command '{0}' failed; if it's not installed, install it with 'npm install vega'".format(cmd))
else:
vg2x = subprocess.Popen([cmd, tmp.name, outputFile], stdout=subprocess.PIPE,
env=dict(os.environ, PATH=npmbin + ":" + os.environ.get("PATH", "")))
if vg2x.wait() != 0:
os.unlink(tmp.name)
raise IOError("Command '{0}' failed; if it's not installed, install it with 'npm install vega'".format(cmd)) |
tmp = tempfile.NamedTemporaryFile(delete=False)
| random_line_split |
nodejs.py | #!/usr/bin/env python
# Copyright 2016 DIANA-HEP
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import subprocess
import tempfile
import os
def | (vegaSpec, outputFile, format=None):
"""Use the 'vega' package in Nodejs to write to SVG or PNG files.
Unlike interactive plotting, this does not require a round trip through a web browser, but it does require a
Nodejs installation on your computer (to evaluate the Javascript).
To install the prerequisites on an Ubuntu system, do
# Cairo dependencies for generating PNG:
sudo apt-get install install libcairo2-dev libjpeg-dev libgif-dev libpango1.0-dev build-essential g++
# Nodejs and its package manager, npm:
sudo apt-get install npm
# Get the 'vega' package with npm; user-install, not global (no sudo)!
npm install vega
Parameters:
vegaSpec (string or dict): JSON string or its dict-of-dicts equivalent
outputFile (string or None): output file name or None to return output as a string
format ('svg', 'png', or None): None (default) guesses format from outputFile extension
"""
if format is None and outputFile is None:
format = "svg"
elif format is None and outputFile.endswith(".svg"):
format = "svg"
elif format is None and outputFile.endswith(".png"):
format = "png"
else:
raise IOError("Could not infer format from outputFile")
if format == "png":
cmd = "vg2png"
elif format == "svg":
cmd = "vg2svg"
else:
raise IOError("Only 'png' and 'svg' output is supported.")
npmbin = subprocess.Popen(["npm", "bin"], stdout=subprocess.PIPE)
if npmbin.wait() == 0:
npmbin = npmbin.stdout.read().strip()
else:
raise IOError("Nodejs Package Manager 'npm' must be installed to use nodejs.write function.")
tmp = tempfile.NamedTemporaryFile(delete=False)
if isinstance(vegaSpec, dict):
vegaSpec = json.dump(tmp, vegaSpec)
else:
tmp.write(vegaSpec)
tmp.close()
if outputFile is None:
vg2x = subprocess.Popen([cmd, tmp.name], stdout=subprocess.PIPE, env=dict(
os.environ, PATH=npmbin + ":" + os.environ.get("PATH", "")))
if vg2x.wait() == 0:
return vg2x.stdout.read()
else:
os.unlink(tmp.name)
raise IOError("Command '{0}' failed; if it's not installed, install it with 'npm install vega'".format(cmd))
else:
vg2x = subprocess.Popen([cmd, tmp.name, outputFile], stdout=subprocess.PIPE,
env=dict(os.environ, PATH=npmbin + ":" + os.environ.get("PATH", "")))
if vg2x.wait() != 0:
os.unlink(tmp.name)
raise IOError("Command '{0}' failed; if it's not installed, install it with 'npm install vega'".format(cmd))
| write | identifier_name |
nodejs.py | #!/usr/bin/env python
# Copyright 2016 DIANA-HEP
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import subprocess
import tempfile
import os
def write(vegaSpec, outputFile, format=None):
| """Use the 'vega' package in Nodejs to write to SVG or PNG files.
Unlike interactive plotting, this does not require a round trip through a web browser, but it does require a
Nodejs installation on your computer (to evaluate the Javascript).
To install the prerequisites on an Ubuntu system, do
# Cairo dependencies for generating PNG:
sudo apt-get install install libcairo2-dev libjpeg-dev libgif-dev libpango1.0-dev build-essential g++
# Nodejs and its package manager, npm:
sudo apt-get install npm
# Get the 'vega' package with npm; user-install, not global (no sudo)!
npm install vega
Parameters:
vegaSpec (string or dict): JSON string or its dict-of-dicts equivalent
outputFile (string or None): output file name or None to return output as a string
format ('svg', 'png', or None): None (default) guesses format from outputFile extension
"""
if format is None and outputFile is None:
format = "svg"
elif format is None and outputFile.endswith(".svg"):
format = "svg"
elif format is None and outputFile.endswith(".png"):
format = "png"
else:
raise IOError("Could not infer format from outputFile")
if format == "png":
cmd = "vg2png"
elif format == "svg":
cmd = "vg2svg"
else:
raise IOError("Only 'png' and 'svg' output is supported.")
npmbin = subprocess.Popen(["npm", "bin"], stdout=subprocess.PIPE)
if npmbin.wait() == 0:
npmbin = npmbin.stdout.read().strip()
else:
raise IOError("Nodejs Package Manager 'npm' must be installed to use nodejs.write function.")
tmp = tempfile.NamedTemporaryFile(delete=False)
if isinstance(vegaSpec, dict):
vegaSpec = json.dump(tmp, vegaSpec)
else:
tmp.write(vegaSpec)
tmp.close()
if outputFile is None:
vg2x = subprocess.Popen([cmd, tmp.name], stdout=subprocess.PIPE, env=dict(
os.environ, PATH=npmbin + ":" + os.environ.get("PATH", "")))
if vg2x.wait() == 0:
return vg2x.stdout.read()
else:
os.unlink(tmp.name)
raise IOError("Command '{0}' failed; if it's not installed, install it with 'npm install vega'".format(cmd))
else:
vg2x = subprocess.Popen([cmd, tmp.name, outputFile], stdout=subprocess.PIPE,
env=dict(os.environ, PATH=npmbin + ":" + os.environ.get("PATH", "")))
if vg2x.wait() != 0:
os.unlink(tmp.name)
raise IOError("Command '{0}' failed; if it's not installed, install it with 'npm install vega'".format(cmd)) | identifier_body | |
schedule-service.spec.ts | import {HttpClientTestingModule, HttpTestingController} from '@angular/common/http/testing';
import {inject, TestBed} from '@angular/core/testing';
import {ScheduleService} from './schedule-service';
import {HTTP_INTERCEPTORS} from '@angular/common/http';
import {ErrorInterceptor} from '../shared/http-error-interceptor';
import {NO_ERRORS_SCHEMA} from '@angular/core';
import {SaeService} from '../shared/sae-service';
import {ScheduleTestdata} from './schedule-testdata';
import {ApplianceTestdata} from '../appliance/appliance-testdata';
import {Logger, Options} from '../log/logger';
import {Level} from '../log/level';
describe('ScheduleService', () => {
beforeEach(() => TestBed.configureTestingModule({
imports: [HttpClientTestingModule],
providers: [
ScheduleService,
{
provide: HTTP_INTERCEPTORS,
useClass: ErrorInterceptor,
multi: true,
},
Logger,
{provide: Options, useValue: {level: Level.DEBUG}},
],
schemas: [NO_ERRORS_SCHEMA],
}));
afterEach(inject([HttpTestingController], (httpMock: HttpTestingController) => {
httpMock.verify();
}));
it('should return an empty array if the appliance has no schedules', (done: any) => {
const service = TestBed.get(ScheduleService);
const httpMock = TestBed.get(HttpTestingController);
const applianceId = ApplianceTestdata.getApplianceId();
service.getSchedules(applianceId).subscribe(
(res) => expect(res).toEqual([]),
() => {},
() => { done(); }
);
const req = httpMock.expectOne(`${SaeService.API}/schedules?id=${applianceId}`);
expect(req.request.method).toEqual('GET'); | req.flush('', { status: 204, statusText: 'Not content' });
});
it('should return a day time frame schedule', () => {
const service = TestBed.get(ScheduleService);
const httpMock = TestBed.get(HttpTestingController);
const applianceId = ApplianceTestdata.getApplianceId();
service.getSchedules(applianceId).subscribe(res => expect(res).toEqual([ScheduleTestdata.daytimeframe12345_type()]));
const req = httpMock.expectOne(`${SaeService.API}/schedules?id=${applianceId}`);
expect(req.request.method).toEqual('GET');
req.flush([ScheduleTestdata.daytimeframe12345_json(true)]);
});
it('should return a consecutive days time frame schedule', () => {
const service = TestBed.get(ScheduleService);
const httpMock = TestBed.get(HttpTestingController);
const applianceId = ApplianceTestdata.getApplianceId();
service.getSchedules(applianceId).subscribe(res => expect(res).toEqual([ScheduleTestdata.consecutiveDaysTimeframe567_type()]));
const req = httpMock.expectOne(`${SaeService.API}/schedules?id=${applianceId}`);
expect(req.request.method).toEqual('GET');
req.flush([ScheduleTestdata.consecutiveDaysTimeframe567_json(true)]);
});
xit('should update the schedules', () => {
const service = TestBed.get(ScheduleService);
const httpMock = TestBed.get(HttpTestingController);
const applianceId = ApplianceTestdata.getApplianceId();
service.setSchedules(applianceId, [ScheduleTestdata.daytimeframe12345_type()]).subscribe(res => expect(res).toBeTruthy());
const req = httpMock.expectOne(`${SaeService.API}/schedules?id=${applianceId}`);
expect(req.request.method).toEqual('PUT');
expect(JSON.parse(req.request.body)).toEqual(jasmine.objectContaining([ScheduleTestdata.daytimeframe12345_json(false)]));
});
}); | random_line_split |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.