code stringlengths 3 1.05M | repo_name stringlengths 4 116 | path stringlengths 4 991 | language stringclasses 9 values | license stringclasses 15 values | size int32 3 1.05M |
|---|---|---|---|---|---|
using Foundation;
using UIKit;
namespace XamarinSimulatedSensors.iOS
{
// The UIApplicationDelegate for the application. This class is responsible for launching the
// User Interface of the application, as well as listening (and optionally responding) to application events from iOS.
[Register ("AppDelegate")]
public class AppDelegate : UIApplicationDelegate
{
// class-level declarations
// The key window of the application; assigned by the UI setup code.
public override UIWindow Window {
get;
set;
}
public override bool FinishedLaunching (UIApplication application, NSDictionary launchOptions)
{
// Override point for customization after application launch.
// If not required for your application you can safely delete this method
return true;
}
public override void OnResignActivation (UIApplication application)
{
// Invoked when the application is about to move from active to inactive state.
// This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message)
// or when the user quits the application and it begins the transition to the background state.
// Games should use this method to pause the game.
}
public override void DidEnterBackground (UIApplication application)
{
// Use this method to release shared resources, save user data, invalidate timers and store the application state.
// If your application supports background execution this method is called instead of WillTerminate when the user quits.
}
public override void WillEnterForeground (UIApplication application)
{
// Called as part of the transition from background to active state.
// Here you can undo many of the changes made on entering the background.
}
public override void OnActivated (UIApplication application)
{
// Restart any tasks that were paused (or not yet started) while the application was inactive.
// If the application was previously in the background, optionally refresh the user interface.
}
public override void WillTerminate (UIApplication application)
{
// Called when the application is about to terminate. Save data, if needed. See also DidEnterBackground.
}
}
}
| MSOpenTech/connectthedots | Devices/DirectlyConnectedDevices/XamarinSimulatedSensors/XamarinSimulatedSensors/XamarinSimulatedSensors.iOS/AppDelegate.cs | C# | mit | 2,158 |
package math.geometry;
/** A geometric shape that can report its own area. */
interface Shape {
/** Computes this shape's area, in squared units of its dimensions. */
double calcArea();
} | joaopedronardari/COO-EACHUSP | Listas/Lista 1/4/src/math/geometry/Shape.java | Java | mit | 63 |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using System.Runtime.InteropServices;
internal static partial class Interop
{
internal static partial class Sys
{
// P/Invoke into the System.Native shim's SystemNative_GetPeerName entry point.
// NOTE(review): socketAddressLen is presumably in/out (buffer capacity in, bytes written out) — confirm against the native shim.
[DllImport(Libraries.SystemNative, EntryPoint = "SystemNative_GetPeerName")]
internal static extern unsafe Error GetPeerName(int socket, byte* socketAddress, int* socketAddressLen);
}
}
| josguil/corefx | src/Common/src/Interop/Unix/System.Native/Interop.GetPeerName.cs | C# | mit | 495 |
package com.ibanheiz.model;
// Concrete Cerveja (beer) product used in the Factory Method pattern example.
public class Erdinger extends Cerveja {
// Prints this beer's self-description (message intentionally kept in Portuguese).
@Override
public void info() {
System.out.println("Sou uma breja alemã boa e modinha");
}
}
| MarcosToledo/java-design-patterns | FactoryMethod/src/com/ibanheiz/model/Erdinger.java | Java | mit | 169 |
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using AllReady.Areas.Admin.Features.Itineraries;
using AllReady.Features.Notifications;
using AllReady.Models;
using MediatR;
using Microsoft.AspNetCore.Mvc.Rendering;
using Moq;
using Xunit;
namespace AllReady.UnitTest.Areas.Admin.Features.Itineraries
{
/// <summary>
/// Tests for AddTeamMemberCommandHandlerAsync: itinerary lookup, mediator
/// queries and the volunteer-list-updated notification.
/// </summary>
public class AddTeamMemberCommandHandlerAsyncTests : InMemoryContextTest
{
    // Seeds one organization -> campaign -> itinerary-type event -> itinerary (Id = 1).
    protected override void LoadTestData()
    {
        var htb = new Organization
        {
            Name = "Humanitarian Toolbox",
            LogoUrl = "http://www.htbox.org/upload/home/ht-hero.png",
            WebUrl = "http://www.htbox.org",
            Campaigns = new List<Campaign>()
        };

        var firePrev = new Campaign
        {
            Name = "Neighborhood Fire Prevention Days",
            ManagingOrganization = htb
        };
        htb.Campaigns.Add(firePrev);

        var queenAnne = new Event
        {
            Id = 1,
            Name = "Queen Anne Fire Prevention Day",
            Campaign = firePrev,
            CampaignId = firePrev.Id,
            StartDateTime = new DateTime(2015, 7, 4, 10, 0, 0).ToUniversalTime(),
            EndDateTime = new DateTime(2015, 12, 31, 15, 0, 0).ToUniversalTime(),
            Location = new Location { Id = 1 },
            RequiredSkills = new List<EventSkill>(),
            EventType = EventType.Itinerary
        };

        var itinerary = new Itinerary
        {
            Event = queenAnne,
            Name = "1st Itinerary",
            Id = 1,
            Date = new DateTime(2016, 07, 01)
        };

        Context.Organizations.Add(htb);
        Context.Campaigns.Add(firePrev);
        Context.Events.Add(queenAnne);
        Context.Itineraries.Add(itinerary);
        Context.SaveChanges();
    }

    [Fact]
    public async Task AddTeamMemberCommandHandlerAsyncReturnsFalseWhenItineraryDoesNotExist()
    {
        var query = new AddTeamMemberCommand
        {
            ItineraryId = 0,
            TaskSignupId = 1
        };

        var handler = new AddTeamMemberCommandHandlerAsync(Context, null);
        var result = await handler.Handle(query);

        // xUnit idiom (xUnit2004): Assert.False instead of Assert.Equal(false, ...)
        Assert.False(result);
    }

    [Fact]
    public async Task AddTeamMemberCommandHandlerAsyncReturnsTrueWhenItineraryExists()
    {
        var query = new AddTeamMemberCommand
        {
            ItineraryId = 1,
            TaskSignupId = 1
        };

        var handler = new AddTeamMemberCommandHandlerAsync(Context, Mock.Of<IMediator>());
        var result = await handler.Handle(query);

        Assert.True(result);
    }

    [Fact]
    public async Task AddTeamMemberCommandHandlerAsyncSendsPotentialItineraryTeamMemberQueryWithCorrectEventId()
    {
        var query = new AddTeamMemberCommand
        {
            ItineraryId = 1,
            TaskSignupId = 1
        };

        var mockMediator = new Mock<IMediator>();
        var handler = new AddTeamMemberCommandHandlerAsync(Context, mockMediator.Object);
        await handler.Handle(query);

        // The handler must query potential members for the event the itinerary belongs to (Id = 1).
        mockMediator.Verify(x => x.SendAsync(It.Is<PotentialItineraryTeamMembersQuery>(y => y.EventId == 1)), Times.Once);
    }

    [Fact(Skip = "RTM Broken Tests")]
    public async Task AddTeamMemberCommandHandlerAsyncPublishesItineraryVolunteerListUpdatedWhenMatchedOnTaskSignupId()
    {
        var query = new AddTeamMemberCommand
        {
            ItineraryId = 1,
            TaskSignupId = 1
        };

        var potentialTaskSignups = new List<SelectListItem>
        {
            new SelectListItem
            {
                Text = "user@domain.tld : Test TaskName",
                Value = query.TaskSignupId.ToString()
            }
        };

        var mockMediator = new Mock<IMediator>();
        mockMediator.Setup(x => x.SendAsync(It.IsAny<PotentialItineraryTeamMembersQuery>())).ReturnsAsync(potentialTaskSignups);

        var handler = new AddTeamMemberCommandHandlerAsync(Context, mockMediator.Object);
        await handler.Handle(query);

        mockMediator.Verify(x => x.PublishAsync(It.Is<IntineraryVolunteerListUpdated>(y => y.TaskSignupId == query.TaskSignupId && y.ItineraryId == query.ItineraryId && y.UpdateType == UpdateType.VolunteerAssigned)), Times.Once);
    }
}
} | mheggeseth/allReady | AllReadyApp/Web-App/AllReady.UnitTest/Areas/Admin/Features/Itineraries/AddTeamMemberCommandHandlerAsyncTests.cs | C# | mit | 4,697 |
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows;
using Windows.UI.Xaml;
using Windows.UI.Xaml.Data;
namespace Microsoft.HockeyApp.Tools
{
/// <summary>
/// XAML value converter between <c>bool</c> and <see cref="Visibility"/>.
/// </summary>
public class BooleanToVisibilityConverter : IValueConverter
{
    /// <summary>
    /// Modifies the source data before passing it to the target for display in the UI.
    /// </summary>
    /// <param name="value">The source data being passed to the target; expected to be a <c>bool</c>.</param>
    /// <param name="targetType">The type of the target property, as a type reference.</param>
    /// <param name="parameter">An optional parameter to be used in the converter logic.</param>
    /// <param name="language">The language of the conversion.</param>
    /// <returns><see cref="Visibility.Visible"/> for <c>true</c>; otherwise <see cref="Visibility.Collapsed"/>.</returns>
    public object Convert(object value, Type targetType, object parameter, string language)
    {
        var isVisible = (bool)value;
        if (isVisible)
        {
            return Visibility.Visible;
        }

        return Visibility.Collapsed;
    }

    /// <summary>
    /// Modifies the target data before passing it to the source object. Called only in TwoWay bindings.
    /// </summary>
    /// <param name="value">The target data being passed to the source; expected to be a <see cref="Visibility"/>.</param>
    /// <param name="targetType">The type of the target property, as a type reference.</param>
    /// <param name="parameter">An optional parameter to be used in the converter logic.</param>
    /// <param name="language">The language of the conversion.</param>
    /// <returns><c>true</c> when the value is <see cref="Visibility.Visible"/>; otherwise <c>false</c>.</returns>
    public object ConvertBack(object value, Type targetType, object parameter, string language)
    {
        var visibility = (Visibility)value;
        return visibility == Visibility.Visible;
    }
}
}
| bitstadium/HockeySDK-Windows | Src/Kit.WP81/Universal/Tools/BooleanToVisibilityConverter.cs | C# | mit | 2,258 |
import { isUndefined, isObject } from '../utils/isType';
$(() => {
// RDA metadata question widget: populates the subject/sub-subject dropdowns
// from a remote metadata-standards API and manages the user's selected list.
// url for the api we will be querying (fetched from /question_formats/rda_api_address)
let url = '';
// key/value lookup for standards: id -> { title, description }
const descriptions = {};
// cleaned up structure of the API results: num -> { name, children, standards }
const minTree = {};
// keeps track of how many waiting api-requests still need to run
let noWaiting = 0;
// prune minTree entries (and their dropdown options) that have no standards.
// Operates on the principle that no two subjects have the same name.
function removeUnused(name) {
  const num = Object.keys(minTree).find((x) => minTree[x].name === name);
  // if not a top level standard, search every subject's children
  if (isUndefined(num)) {
    Object.keys(minTree).forEach((knum) => {
      const child = Object.keys(minTree[knum].children)
        .find((x) => minTree[knum].children[x].name === name);
      // BUG FIX: Array/Object key lookup via `find` yields a string key (or
      // undefined) — never an object — so the previous `isObject(child)` test
      // was always false and empty sub-subjects were never pruned.
      if (!isUndefined(child)) {
        delete minTree[knum].children[child];
        $(`.rda_metadata .sub-subject select option[value="${name}"]`).remove();
      }
    });
  } else {
    delete minTree[num];
    // remove minTree[num] from top-level dropdowns
    $(`.rda_metadata .subject select option[value="${name}"]`).remove();
  }
}
// Fetch title/description for one standard id and cache it in `descriptions`.
// Decrements `noWaiting` when the response arrives.
function getDescription(id) {
$.ajax({
// id.slice(4) — presumably strips a 4-character prefix from the id before
// appending it to the API url; TODO confirm the id format against the API.
url: url + id.slice(4),
type: 'GET',
crossDomain: true,
dataType: 'json',
}).done((results) => {
descriptions[id] = {};
descriptions[id].title = results.title;
descriptions[id].description = results.description;
noWaiting -= 1;
});
}
// init descriptions lookup table based on passed ids
// (skips ids already fetched; bumps noWaiting once per pending request)
function initDescriptions(ids) {
ids.forEach((id) => {
if (!(id in descriptions)) {
noWaiting += 1;
getDescription(id);
}
});
}
// takes in a subset of the min_tree which has name and standards properties
// initializes the standards property to the result of an AJAX POST
// `child` is the child key when querying a sub-subject, undefined for a subject
function getStandards(name, num, child) {
// slice -4 from url to remove '/api/'
noWaiting += 1;
$.ajax({
url: `${url.slice(0, -4)}query/schemes`,
type: 'POST',
crossDomain: true,
data: `keyword=${name}`,
dataType: 'json',
}).done((result) => {
if (isUndefined(child)) {
minTree[num].standards = result.ids;
} else {
minTree[num].children[child].standards = result.ids;
}
// no standards for this (sub-)subject: prune it from the tree and dropdowns
if (result.ids.length < 1) {
removeUnused(name);
}
noWaiting -= 1;
initDescriptions(result.ids);
});
}
// clean up the data initially returned from the API: copy names/children into
// minTree and kick off a standards lookup for every subject and sub-subject
function cleanTree(apiTree) {
// iterate over api_tree
Object.keys(apiTree).forEach((num) => {
minTree[num] = {};
minTree[num].name = apiTree[num].name;
minTree[num].children = [];
if (apiTree[num].children !== undefined) {
Object.keys(apiTree[num].children).forEach((child) => {
minTree[num].children[child] = {};
minTree[num].children[child].name = apiTree[num].children[child].name;
minTree[num].children[child].standards = [];
getStandards(minTree[num].children[child].name, num, child);
});
}
// init a standards on top level
minTree[num].standards = [];
getStandards(minTree[num].name, num, undefined);
});
}
// Feed the titles of every known standard to the bootstrap typeahead plugin.
function initTypeahead() {
  // (fix: the previous version also built a parallel {value, id} array that
  // was never used — dead code removed)
  const simpdat = [];
  Object.keys(descriptions).forEach((id) => {
    simpdat.push(descriptions[id].title);
  });
  const typ = $('.standards-typeahead');
  typ.typeahead({ source: simpdat });
}
// For each metadata question, rebuild the selected-standards list from the
// JSON stored in the hidden #standards field of the answer form.
function initStandards() {
  $('.rda_metadata').each(function () { // eslint-disable-line func-names
    // list of selected standards
    const selectedStandards = $(this).find('.selected_standards .list');
    // hidden form field holding the selected standards as JSON
    const formStandards = $(this).next('form').find('#standards');
    const standardsArray = JSON.parse(formStandards.val());
    // cache the parsed map on the element for the click handlers
    formStandards.data('standard', standardsArray);
    Object.keys(standardsArray).forEach((key) => {
      if (key === standardsArray[key]) {
        // custom standard: key and value are both the user-supplied name
        // (fix: closing tag was previously malformed as `</li`)
        selectedStandards.append(`<li class="${key}">${key}<button class="remove-standard"><i class="fas fa-times-circle"></i></button></li>`);
      } else {
        // API standard: key is the id; display its cached title
        selectedStandards.append(`<li class="${key}">${descriptions[key].title}<button class="remove-standard"><i class="fas fa-times-circle"></i></button></li>`);
      }
    });
  });
}
// Poll until all outstanding API requests have completed, then initialise the
// dropdowns, selected standards and typeahead in one go.
function waitAndUpdate() {
if (noWaiting > 0) {
// if we are waiting on api responses, call this function again in 1 second
setTimeout(waitAndUpdate, 1000);
} else {
// update all the dropdowns/ standards explore box (calling on subject
// will suffice since it will necessarily update sub-subject)
$('.rda_metadata .subject select').change();
initStandards();
initTypeahead();
}
}
// Given a subject name, return the matching top-level entry of minTree
// (undefined value if no top-level subject carries that name).
function getSubject(subjectText) {
  const key = Object.keys(minTree).find((k) => minTree[k].name === subjectText);
  return minTree[key];
}
// given a sub-subject name and an array of children, data, return the
// applicable child entry (undefined if no child carries that name)
function getSubSubject(subsubjectText, data) {
const child = Object.keys(data).find((x) => data[x].name === subsubjectText);
return data[child];
}
// Trigger a change on the answer form so the save/autosave machinery notices
// that the question's value has been modified.
function updateSaveStatus(group) {
// update save/autosave status
group.next('form').find('fieldset input').change();
}
// change sub-subjects and standards based on selected subject
$('.rda_metadata').on('change', '.subject select', (e) => {
const target = $(e.currentTarget);
const group = target.closest('.rda_metadata');
const subSubject = group.find('.sub-subject select');
const subjectText = target.find(':selected').text();
// find subject in min_tree
const subject = getSubject(subjectText);
// check to see if this subject has no children (and thus its own standards)
if (subject.children.length === 0) {
// hide sub-subject since there's no data for it
subSubject.closest('div').hide();
// update the standards display selector
$('.rda_metadata .sub-subject select').change();
} else {
// show the sub-subject in case it was previously hidden
subSubject.closest('div').show();
// update the sub-subject display selector
subSubject.find('option').remove();
subject.children.forEach((child) => {
$('<option />', { value: child.name, text: child.name }).appendTo(subSubject);
});
// once we have updated the sub-standards, ensure the standards displayed
// get updated as well
$('.rda_metadata .sub-subject select').change();
}
});
// change standards based on selected sub-subject (or the subject itself when
// the subject has no children)
$('.rda_metadata').on('change', '.sub-subject select', (e) => {
const target = $(e.currentTarget);
const group = target.closest('.rda_metadata');
const subject = group.find('.subject select');
const subSubject = group.find('.sub-subject select');
const subjectText = subject.find(':selected').text();
const subjectData = getSubject(subjectText);
const standards = group.find('.browse-standards-border');
let standardsData;
if (subjectData.children.length === 0) {
// update based on subject's standards
standardsData = subjectData.standards;
} else {
// update based on sub-subject's standards
const subsubjectText = subSubject.find(':selected').text();
standardsData = getSubSubject(subsubjectText, subjectData.children).standards;
}
// clear list of standards
standards.empty();
// update list of standards: one card per standard with an Add button
Object.keys(standardsData).forEach((num) => {
const standard = descriptions[standardsData[num]];
standards.append(`<div style="background-color:#EAEAEA;border-radius:3px"><strong>${standard.title}</strong><div style="float:right"><button class="btn btn-primary select_standard" data-standard="${standardsData[num]}">Add Standard</button></br></div><p>${standard.description}</p></div>`);
});
});
// when the 'Add Standard' button next to the typeahead search is clicked, add
// the highlighted standard to the user's selected list and persist it in the
// hidden #standards form field
$('.rda_metadata').on('click', '.select_standard_typeahead', (e) => {
  const target = $(e.currentTarget);
  const group = target.closest('.rda_metadata');
  const selected = group.find('ul.typeahead li.active');
  const selectedStandards = group.find('.selected_standards .list');
  // the title of the standard, as displayed by the typeahead
  const standardTitle = selected.data('value');
  // reverse-lookup the standard id from its title
  const standard = Object.keys(descriptions)
    .find((standardId) => descriptions[standardId].title === standardTitle);
  // nothing highlighted in the typeahead, or an unknown title: bail out
  // (fix: the old code threw a TypeError on descriptions[undefined].title)
  if (isUndefined(standard)) {
    return;
  }
  selectedStandards.append(`<li class="${standard}">${descriptions[standard].title}<button class="remove-standard"><i class="fas fa-times-circle"></i></button></li>`);
  const formStandards = group.next('form').find('#standards');
  // get the data for selected standards from the data attribute 'standard'
  // of the hidden field #standards within the answer form
  let frmStdsDat = formStandards.data('standard');
  // need to init data object for first time (isUndefined for consistency
  // with the .select_standard handler)
  if (isUndefined(frmStdsDat)) {
    frmStdsDat = {};
  }
  // key is the standard id, value its title — storing the title means export
  // does not need to re-query the API for each selected standard
  frmStdsDat[standard] = descriptions[standard].title;
  // update data value
  formStandards.data('standard', frmStdsDat);
  // update input value
  formStandards.val(JSON.stringify(frmStdsDat));
  updateSaveStatus(group);
});
// when an 'Add standard' button is clicked, we need to add this to the user's
// selected list of standards
// update the data and val of hidden standards field in form
$('.rda_metadata').on('click', '.select_standard', (e) => {
const target = $(e.currentTarget);
const group = target.closest('.rda_metadata');
const selectedStandards = group.find('.selected_standards .list');
// the identifier for the standard which was selected
const standard = target.data('standard');
// append the standard to the displayed list of selected standards
selectedStandards.append(`<li class="${standard}">${descriptions[standard].title}<button class="remove-standard"><i class="fas fa-times-circle"></i></button></li>`);
const formStandards = group.next('form').find('#standards');
// get the data for selected standards from the data attribute 'standard'
// of the hidden field #standards within the answer form
let frmStdsDat = formStandards.data('standard');
// need to init data object for first time
if (isUndefined(frmStdsDat)) {
frmStdsDat = {};
}
// init the key to standard id and value to the standard's title.
frmStdsDat[standard] = descriptions[standard].title;
// update data value
formStandards.data('standard', frmStdsDat);
// update input value
formStandards.val(JSON.stringify(frmStdsDat));
updateSaveStatus(group);
});
// when a 'Remove Standard' button is clicked, we need to remove this from the
// user's selected list of standards. Additionally, we need to remove the
// standard from the data/val fields of standards in hidden form
$('.rda_metadata').on('click', '.remove-standard', (e) => {
const target = $(e.currentTarget);
const group = target.closest('.rda_metadata');
const listedStandard = target.closest('li');
// the <li>'s class carries the standard id (or custom name)
const standardId = listedStandard.attr('class');
// remove the standard from the list
listedStandard.remove();
// update the data for the form
const formStandards = group.next('form').find('#standards');
const frmStdsDat = formStandards.data('standard');
delete frmStdsDat[standardId];
// update data value
formStandards.data('standard', frmStdsDat);
// update input value
formStandards.val(JSON.stringify(frmStdsDat));
updateSaveStatus(group);
});
// show the add-custom-standard div when 'standard not listed' is clicked
$('.rda_metadata').on('click', '.custom-standard', (e) => {
e.preventDefault();
const target = $(e.currentTarget);
const group = target.closest('.rda_metadata');
const addStandardDiv = $(group.find('.add-custom-standard'));
addStandardDiv.show();
});
// when this button is clicked, add the typed custom standard to the list of
// selected standards and persist it in the hidden #standards form field
$('.rda_metadata').on('click', '.submit_custom_standard', (e) => {
  e.preventDefault();
  const target = $(e.currentTarget);
  const group = target.closest('.rda_metadata');
  const selectedStandards = group.find('.selected_standards .list');
  const standardName = group.find('.custom-standard-name').val();
  // robustness: ignore empty submissions instead of adding a blank entry
  if (!standardName) {
    return;
  }
  selectedStandards.append(`<li class="${standardName}">${standardName}<button class="remove-standard"><i class="fas fa-times-circle"></i></button></li>`);
  const formStandards = group.next('form').find('#standards');
  // get the data for selected standards from the data attribute 'standard'
  // of the hidden field #standards within the answer form
  let frmStdsDat = formStandards.data('standard');
  // need to init data object for first time (isUndefined for consistency
  // with the .select_standard handler)
  if (isUndefined(frmStdsDat)) {
    frmStdsDat = {};
  }
  // custom standards have no API id, so key and value are both the name
  frmStdsDat[standardName] = standardName;
  // update data value
  formStandards.data('standard', frmStdsDat);
  // update input value
  formStandards.val(JSON.stringify(frmStdsDat));
  updateSaveStatus(group);
});
// Populate every metadata question's subject dropdown from minTree, then wait
// for outstanding API requests before wiring up the rest of the UI.
function initMetadataQuestions() {
// find all elements with rda_metadata div
$('.rda_metadata').each((idx, el) => {
// el is the .rda_metadata element for this question
const sub = $(el).find('.subject select');
Object.keys(minTree).forEach((num) => {
$('<option />', { value: minTree[num].name, text: minTree[num].name }).appendTo(sub);
});
});
waitAndUpdate();
}
// callback for the url+subject-index request:
// clean up the api tree and initialise the question widgets
function subjectCallback(data) {
// remove unused standards/substandards
cleanTree(data);
// initialize the dropdowns/selected standards for the page
initMetadataQuestions();
}
// callback from get request to rda_api_address:
// store the api url, then fetch the subject-index tree
function urlCallback(data) {
// init url (destructure the `url` property of the response)
({ url } = data);
// get api_tree structure from api
$.ajax({
url: `${url}subject-index`,
type: 'GET',
crossDomain: true,
dataType: 'json',
}).done((result) => {
subjectCallback(result);
});
}
// get the url we will be using for the api
// only do this if page has an rda_metadata div
if ($('.rda_metadata').length) {
$.getJSON('/question_formats/rda_api_address', urlCallback);
}
// when the autosave or save action occurs, this clears out both the list of
// selected standards, and the selectors for new standards, as it re-renders
// the partial. This "autosave" event is triggered by that JS in order to
// allow us to know when the save has happened and re-init the question
$('.rda_metadata').on('autosave', (e) => {
e.preventDefault();
// re-initialize the metadata question
initMetadataQuestions();
});
});
| DigitalCurationCentre/roadmap | app/javascript/src/answers/rdaMetadata.js | JavaScript | mit | 16,053 |
package com.alexstyl.specialdates.events.peopleevents;
import android.content.ContentValues;
import com.alexstyl.specialdates.Optional;
import com.alexstyl.specialdates.contact.Contact;
import com.alexstyl.specialdates.date.ContactEvent;
import com.alexstyl.specialdates.events.database.DatabaseContract.AnnualEventsContract;
import java.util.List;
/**
 * Marshals {@link ContactEvent}s into {@link ContentValues} rows that match
 * the annual-events database contract.
 */
public class ContactEventsMarshaller {
    private static final int DEFAULT_VALUES_SIZE = 5;
    private static final int IS_VISIBLE = 1;
    private final ShortDateLabelCreator dateLabelCreator;

    public ContactEventsMarshaller(ShortDateLabelCreator dateLabelCreator) {
        this.dateLabelCreator = dateLabelCreator;
    }

    /** Converts each event in the given list into one {@link ContentValues} row. */
    public ContentValues[] marshall(List<ContactEvent> item) {
        ContentValues[] rows = new ContentValues[item.size()];
        int index = 0;
        for (ContactEvent contactEvent : item) {
            rows[index] = asContentValues(contactEvent);
            index += 1;
        }
        return rows;
    }

    // Builds the database row for a single contact event.
    private ContentValues asContentValues(ContactEvent event) {
        Contact contact = event.getContact();
        ContentValues values = new ContentValues(DEFAULT_VALUES_SIZE);
        values.put(AnnualEventsContract.CONTACT_ID, contact.getContactID());
        values.put(AnnualEventsContract.DISPLAY_NAME, contact.getDisplayName().toString());
        values.put(AnnualEventsContract.DATE, dateLabelCreator.createLabelWithYearPreferredFor(event.getDate()));
        values.put(AnnualEventsContract.EVENT_TYPE, event.getType().getId());
        values.put(AnnualEventsContract.SOURCE, contact.getSource());
        values.put(AnnualEventsContract.VISIBLE, IS_VISIBLE);
        appendDeviceEventId(event, values);
        return values;
    }

    // Stores the linked device event id when present, -1 otherwise.
    private void appendDeviceEventId(ContactEvent event, ContentValues values) {
        Optional<Long> deviceEventId = event.getDeviceEventId();
        if (deviceEventId.isPresent()) {
            values.put(AnnualEventsContract.DEVICE_EVENT_ID, deviceEventId.get());
        } else {
            values.put(AnnualEventsContract.DEVICE_EVENT_ID, -1);
        }
    }
}
| alexstyl/Memento-Calendar | android_mobile/src/main/java/com/alexstyl/specialdates/events/peopleevents/ContactEventsMarshaller.java | Java | mit | 2,165 |
// Package entry point: re-export the consistent hashing implementation.
module.exports = require('./consistent_hashing');
| shawnvan/coffee-server-demo | node_modules/consistent-hashing/lib/index.js | JavaScript | mit | 50 |
var express = require('express');
var path = require('path');
var logger = require('morgan');
var cookieParser = require('cookie-parser');
var bodyParser = require('body-parser');
var index = require('./routes/index');
var users = require('./routes/users');
// Express application: middleware pipeline, routes and error handling.
var app = express();
// view engine setup
app.set('views', path.join(__dirname, 'views'));
app.set('view engine', 'pug');
// uncomment after placing your favicon in /public
//app.use(favicon(path.join(__dirname, 'public', 'favicon.ico')));
app.use(logger('dev'));
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: false }));
app.use(cookieParser());
app.use(express.static(path.join(__dirname, 'public')));
// mount the routers
app.use('/', index);
app.use('/users', users);
// catch 404 and forward to error handler
app.use(function(req, res, next) {
var err = new Error('Not Found');
err.status = 404;
next(err);
});
// error handler — the unused 'next' parameter is required: Express recognises
// error-handling middleware by its four-argument signature
app.use(function(err, req, res, next) {
// set locals, only providing error in development
res.locals.message = err.message;
res.locals.error = req.app.get('env') === 'development' ? err : {};
// render the error page
res.status(err.status || 500);
res.render('error');
});
module.exports = app;
| lagerjs/lager | demo/express-app/app.js | JavaScript | mit | 1,216 |
// Copyright (c) 2010 Satoshi Nakamoto
// Copyright (c) 2009-2013 The Bitcoin developers
// Distributed under the MIT/X11 software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#include "bitcoinrpc.h"
#include "base58.h"
#include "init.h"
#include "main.h"
#include "util.h"
#include "wallet.h"
#include <boost/algorithm/string.hpp>
#include <boost/asio.hpp>
#include <boost/asio/ssl.hpp>
#include <boost/bind.hpp>
#include <boost/filesystem.hpp>
#include <boost/foreach.hpp>
#include <boost/iostreams/concepts.hpp>
#include <boost/iostreams/stream.hpp>
#include <boost/lexical_cast.hpp>
#include <boost/shared_ptr.hpp>
#include "json/json_spirit_writer_template.h"
using namespace std;
using namespace boost;
using namespace boost::asio;
using namespace json_spirit;
// Expected HTTP basic-auth credentials in "user:password" form — presumably
// compared against incoming RPC requests; confirm against the auth check.
static std::string strRPCUserColonPass;
// These are created by StartRPCThreads, destroyed in StopRPCThreads
static asio::io_service* rpc_io_service = NULL;
// Pending per-key timers, keyed by name.
static map<string, boost::shared_ptr<deadline_timer> > deadlineTimers;
static ssl::context* rpc_ssl_context = NULL;
static boost::thread_group* rpc_worker_group = NULL;
// Build a JSON-RPC error object of the form {"code": <code>, "message": <message>}.
Object JSONRPCError(int code, const string& message)
{
    Object err;
    err.push_back(Pair("code", code));
    err.push_back(Pair("message", message));
    return err;
}
// Validate positional RPC parameters against the expected JSON value types.
// Extra parameters beyond typesExpected are not checked; with fAllowNull a
// null parameter is accepted in any position. Throws RPC_TYPE_ERROR on mismatch.
void RPCTypeCheck(const Array& params,
const list<Value_type>& typesExpected,
bool fAllowNull)
{
unsigned int i = 0;
BOOST_FOREACH(Value_type t, typesExpected)
{
if (params.size() <= i)
break;
const Value& v = params[i];
if (!((v.type() == t) || (fAllowNull && (v.type() == null_type))))
{
string err = strprintf("Expected type %s, got %s",
Value_type_name[t], Value_type_name[v.type()]);
throw JSONRPCError(RPC_TYPE_ERROR, err);
}
i++;
}
}
// Validate named RPC parameters against the expected JSON value types.
// Unless fAllowNull, every expected key must be present and non-null.
// Throws RPC_TYPE_ERROR on a missing key or type mismatch.
void RPCTypeCheck(const Object& o,
const map<string, Value_type>& typesExpected,
bool fAllowNull)
{
BOOST_FOREACH(const PAIRTYPE(string, Value_type)& t, typesExpected)
{
const Value& v = find_value(o, t.first);
if (!fAllowNull && v.type() == null_type)
throw JSONRPCError(RPC_TYPE_ERROR, strprintf("Missing %s", t.first.c_str()));
if (!((v.type() == t.second) || (fAllowNull && (v.type() == null_type))))
{
string err = strprintf("Expected type %s for %s, got %s",
Value_type_name[t.second], t.first.c_str(), Value_type_name[v.type()]);
throw JSONRPCError(RPC_TYPE_ERROR, err);
}
}
}
// Convert a JSON amount (in coins) to an integer number of base units.
// Rejects zero/negative amounts and amounts above the 21,000,000-coin cap,
// then re-checks the rounded integer result against MoneyRange.
int64_t AmountFromValue(const Value& value)
{
double dAmount = value.get_real();
if (dAmount <= 0.0 || dAmount > 21000000.0)
throw JSONRPCError(RPC_TYPE_ERROR, "Invalid amount");
int64_t nAmount = roundint64(dAmount * COIN);
if (!MoneyRange(nAmount))
throw JSONRPCError(RPC_TYPE_ERROR, "Invalid amount");
return nAmount;
}
// Convert an integer amount in base units into the floating-point coin value
// used in JSON output (inverse of AmountFromValue, modulo rounding).
Value ValueFromAmount(int64_t amount)
{
    double coins = static_cast<double>(amount) / static_cast<double>(COIN);
    return coins;
}
// Hex-encode a 32-bit value in network (big-endian) byte order via htonl,
// so the most significant byte appears first in the returned string.
std::string HexBits(unsigned int nBits)
{
union {
int32_t nBits;
char cBits[4];
} uBits;
uBits.nBits = htonl((int32_t)nBits);
return HexStr(BEGIN(uBits.cBits), END(uBits.cBits));
}
// Parse a JSON value as a hex-encoded uint256; strName is used in the error
// message. Non-string values leave strHex empty and fail the IsHex check.
uint256 ParseHashV(const Value& v, string strName)
{
string strHex;
if (v.type() == str_type)
strHex = v.get_str();
if (!IsHex(strHex)) // Note: IsHex("") is false
throw JSONRPCError(RPC_INVALID_PARAMETER, strName+" must be hexadecimal string (not '"+strHex+"')");
uint256 result;
result.SetHex(strHex);
return result;
}
// Parse the named member of a JSON object as a hex-encoded uint256.
uint256 ParseHashO(const Object& o, string strKey)
{
return ParseHashV(find_value(o, strKey), strKey);
}
// Parse a JSON value as a hex string and return the decoded bytes;
// strName is used in the error message on failure.
vector<unsigned char> ParseHexV(const Value& v, string strName)
{
string strHex;
if (v.type() == str_type)
strHex = v.get_str();
if (!IsHex(strHex))
throw JSONRPCError(RPC_INVALID_PARAMETER, strName+" must be hexadecimal string (not '"+strHex+"')");
return ParseHex(strHex);
}
// Parse the named member of a JSON object as a hex string into raw bytes.
vector<unsigned char> ParseHexO(const Object& o, string strKey)
{
return ParseHexV(find_value(o, strKey), strKey);
}
///
/// Note: This interface may still be subject to change.
///
// Build the help text for one command, or for every registered command when
// strCommand is empty. Each handler is invoked with fHelp=true and is expected
// to throw its usage string; that exception text is collected here.
string CRPCTable::help(string strCommand) const
{
string strRet;
set<rpcfn_type> setDone;
for (map<string, const CRPCCommand*>::const_iterator mi = mapCommands.begin(); mi != mapCommands.end(); ++mi)
{
const CRPCCommand *pcmd = mi->second;
string strMethod = mi->first;
// We already filter duplicates, but these deprecated screw up the sort order
if (strMethod.find("label") != string::npos)
continue;
if (strCommand != "" && strMethod != strCommand)
continue;
// skip wallet commands when no wallet is loaded
if (pcmd->reqWallet && !pwalletMain)
continue;
try
{
Array params;
rpcfn_type pfn = pcmd->actor;
// setDone de-duplicates aliases that share the same handler function
if (setDone.insert(pfn).second)
(*pfn)(params, true);
}
catch (std::exception& e)
{
// Help text is returned in an exception
string strHelp = string(e.what());
// in the full listing keep only the first line (the usage summary)
if (strCommand == "")
if (strHelp.find('\n') != string::npos)
strHelp = strHelp.substr(0, strHelp.find('\n'));
strRet += strHelp + "\n";
}
}
if (strRet == "")
strRet = strprintf("help: unknown command: %s\n", strCommand.c_str());
// drop the trailing newline
strRet = strRet.substr(0,strRet.size()-1);
return strRet;
}
// RPC command "help": list all commands, or show help for a single command.
Value help(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 1)
throw runtime_error(
"help ( \"command\" )\n"
"\nList all commands, or get help for a specified command.\n"
"\nArguments:\n"
"1. \"command\" (string, optional) The command to get help on\n"
"\nResult:\n"
"\"text\" (string) The help text\n"
);
string strCommand;
if (params.size() > 0)
strCommand = params[0].get_str();
return tableRPC.help(strCommand);
}
// RPC command "stop": initiate a clean shutdown of the server.
Value stop(const Array& params, bool fHelp)
{
// Accept the deprecated and ignored 'detach' boolean argument
if (fHelp || params.size() > 1)
throw runtime_error(
"stop\n"
"\nStop Bitcoin server.");
// Shutdown will take long enough that the response should get back
StartShutdown();
return "Bitcoin server stopping";
}
//
// Call Table
//
// Master dispatch table mapping RPC method names to handlers. Column
// semantics (enforced in CRPCTable::execute and CRPCTable::help):
//   okSafeMode - command may run while the node is in safe mode
//   threadSafe - command runs without taking cs_main / cs_wallet locks
//   reqWallet  - command needs a loaded wallet (hidden/disabled otherwise)
static const CRPCCommand vRPCCommands[] =
{ // name                      actor (function)         okSafeMode threadSafe reqWallet
  // ------------------------  -----------------------  ---------- ---------- ---------
    { "help",                   &help,                   true,      true,      false },
    { "stop",                   &stop,                   true,      true,      false },
    { "getblockcount",          &getblockcount,          true,      false,     false },
    { "getbestblockhash",       &getbestblockhash,       true,      false,     false },
    { "getconnectioncount",     &getconnectioncount,     true,      false,     false },
    { "getpeerinfo",            &getpeerinfo,            true,      false,     false },
    { "ping",                   &ping,                   true,      false,     false },
    { "addnode",                &addnode,                true,      true,      false },
    { "getaddednodeinfo",       &getaddednodeinfo,       true,      true,      false },
    { "getnettotals",           &getnettotals,           true,      true,      false },
    { "getdifficulty",          &getdifficulty,          true,      false,     false },
    { "getnetworkhashps",       &getnetworkhashps,       true,      false,     false },
    { "getgenerate",            &getgenerate,            true,      false,     false },
    { "setgenerate",            &setgenerate,            true,      false,     true },
    { "gethashespersec",        &gethashespersec,        true,      false,     false },
    { "getinfo",                &getinfo,                true,      false,     false },
    { "getmininginfo",          &getmininginfo,          true,      false,     false },
    { "getnewaddress",          &getnewaddress,          true,      false,     true },
    { "getaccountaddress",      &getaccountaddress,      true,      false,     true },
    { "getrawchangeaddress",    &getrawchangeaddress,    true,      false,     true },
    { "setaccount",             &setaccount,             true,      false,     true },
    { "getaccount",             &getaccount,             false,     false,     true },
    { "getaddressesbyaccount",  &getaddressesbyaccount,  true,      false,     true },
    { "sendtoaddress",          &sendtoaddress,          false,     false,     true },
    { "getreceivedbyaddress",   &getreceivedbyaddress,   false,     false,     true },
    { "getreceivedbyaccount",   &getreceivedbyaccount,   false,     false,     true },
    { "listreceivedbyaddress",  &listreceivedbyaddress,  false,     false,     true },
    { "listreceivedbyaccount",  &listreceivedbyaccount,  false,     false,     true },
    { "backupwallet",           &backupwallet,           true,      false,     true },
    { "keypoolrefill",          &keypoolrefill,          true,      false,     true },
    { "walletpassphrase",       &walletpassphrase,       true,      false,     true },
    { "walletpassphrasechange", &walletpassphrasechange, false,     false,     true },
    { "walletlock",             &walletlock,             true,      false,     true },
    { "encryptwallet",          &encryptwallet,          false,     false,     true },
    { "validateaddress",        &validateaddress,        true,      false,     false },
    { "getbalance",             &getbalance,             false,     false,     true },
    { "move",                   &movecmd,                false,     false,     true },
    { "sendfrom",               &sendfrom,               false,     false,     true },
    { "sendmany",               &sendmany,               false,     false,     true },
    { "addmultisigaddress",     &addmultisigaddress,     false,     false,     true },
    { "createmultisig",         &createmultisig,         true,      true ,     false },
    { "getrawmempool",          &getrawmempool,          true,      false,     false },
    { "getblock",               &getblock,               false,     false,     false },
    { "getblockhash",           &getblockhash,           false,     false,     false },
    { "gettransaction",         &gettransaction,         false,     false,     true },
    { "listtransactions",       &listtransactions,       false,     false,     true },
    { "listaddressgroupings",   &listaddressgroupings,   false,     false,     true },
    { "signmessage",            &signmessage,            false,     false,     true },
    { "verifymessage",          &verifymessage,          false,     false,     false },
    { "getwork",                &getwork,                true,      false,     true },
    { "listaccounts",           &listaccounts,           false,     false,     true },
    { "settxfee",               &settxfee,               false,     false,     true },
    { "getblocktemplate",       &getblocktemplate,       true,      false,     false },
    { "submitblock",            &submitblock,            false,     false,     false },
    { "listsinceblock",         &listsinceblock,         false,     false,     true },
    { "dumpprivkey",            &dumpprivkey,            true,      false,     true },
    { "dumpwallet",             &dumpwallet,             true,      false,     true },
    { "importprivkey",          &importprivkey,          false,     false,     true },
    { "importwallet",           &importwallet,           false,     false,     true },
    { "listunspent",            &listunspent,            false,     false,     true },
    { "getrawtransaction",      &getrawtransaction,      false,     false,     false },
    { "createrawtransaction",   &createrawtransaction,   false,     false,     false },
    { "decoderawtransaction",   &decoderawtransaction,   false,     false,     false },
    { "decodescript",           &decodescript,           false,     false,     false },
    { "signrawtransaction",     &signrawtransaction,     false,     false,     false },
    { "sendrawtransaction",     &sendrawtransaction,     false,     false,     false },
    { "gettxoutsetinfo",        &gettxoutsetinfo,        true,      false,     false },
    { "gettxout",               &gettxout,               true,      false,     false },
    { "lockunspent",            &lockunspent,            false,     false,     true },
    { "listlockunspent",        &listlockunspent,        false,     false,     true },
    { "verifychain",            &verifychain,            true,      false,     false },
};
// Index every entry of the static dispatch table by command name.
CRPCTable::CRPCTable()
{
    const unsigned int nCommands = sizeof(vRPCCommands) / sizeof(vRPCCommands[0]);
    for (unsigned int i = 0; i < nCommands; i++)
        mapCommands[vRPCCommands[i].name] = &vRPCCommands[i];
}
// Look up a command by name; NULL when unknown.
const CRPCCommand *CRPCTable::operator[](string name) const
{
    map<string, const CRPCCommand*>::const_iterator it = mapCommands.find(name);
    return (it == mapCommands.end()) ? NULL : it->second;
}
//
// HTTP protocol
//
// This ain't Apache. We're just using HTTP header for the length field
// and to be compatible with other JSON-RPC implementations.
//
// Assemble a minimal HTTP/1.1 POST request carrying strMsg as the body.
// Any caller-supplied headers (e.g. Authorization) are appended after the
// fixed ones.
string HTTPPost(const string& strMsg, const map<string,string>& mapRequestHeaders)
{
    ostringstream s;
    s << "POST / HTTP/1.1\r\n"
      << "User-Agent: bitcoin-json-rpc/" << FormatFullVersion() << "\r\n"
      << "Host: 127.0.0.1\r\n"
      << "Content-Type: application/json\r\n"
      << "Content-Length: " << strMsg.size() << "\r\n"
      << "Connection: close\r\n"
      << "Accept: application/json\r\n";
    for (map<string,string>::const_iterator it = mapRequestHeaders.begin(); it != mapRequestHeaders.end(); ++it)
        s << it->first << ": " << it->second << "\r\n";
    s << "\r\n" << strMsg;
    return s.str();
}
// Return the current UTC time formatted per RFC 1123, e.g.
// "Sun, 06 Nov 1994 08:49:37 +0000", as used in HTTP Date headers.
//
// The previous implementation switched the process-wide LC_TIME locale
// around strftime() to force English day/month names. setlocale() mutates
// global state and is not thread-safe, and this runs concurrently on the
// RPC worker threads. Formatting with fixed lookup tables produces the
// identical string without touching the global locale.
std::string rfc1123Time()
{
    static const char* const pszDays[] =
        { "Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat" };
    static const char* const pszMonths[] =
        { "Jan", "Feb", "Mar", "Apr", "May", "Jun",
          "Jul", "Aug", "Sep", "Oct", "Nov", "Dec" };
    time_t now;
    time(&now);
    // NOTE: gmtime() still uses a shared static buffer; its result is
    // consumed immediately below.
    struct tm* now_gmt = gmtime(&now);
    char buffer[64];
    snprintf(buffer, sizeof(buffer), "%s, %02d %s %04d %02d:%02d:%02d +0000",
             pszDays[now_gmt->tm_wday], now_gmt->tm_mday,
             pszMonths[now_gmt->tm_mon], now_gmt->tm_year + 1900,
             now_gmt->tm_hour, now_gmt->tm_min, now_gmt->tm_sec);
    return std::string(buffer);
}
// Compose a complete HTTP response with status nStatus and body strMsg.
// 401 responses get a canned HTML body plus a WWW-Authenticate challenge;
// every other status is sent as application/json with the given body.
static string HTTPReply(int nStatus, const string& strMsg, bool keepalive)
{
    if (nStatus == HTTP_UNAUTHORIZED)
        // NOTE(review): the Content-Length of 296 is hard-coded for the
        // HTML body below -- verify it still matches if the body changes.
        return strprintf("HTTP/1.0 401 Authorization Required\r\n"
            "Date: %s\r\n"
            "Server: bitcoin-json-rpc/%s\r\n"
            "WWW-Authenticate: Basic realm=\"jsonrpc\"\r\n"
            "Content-Type: text/html\r\n"
            "Content-Length: 296\r\n"
            "\r\n"
            "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.01 Transitional//EN\"\r\n"
            "\"http://www.w3.org/TR/1999/REC-html401-19991224/loose.dtd\">\r\n"
            "<HTML>\r\n"
            "<HEAD>\r\n"
            "<TITLE>Error</TITLE>\r\n"
            "<META HTTP-EQUIV='Content-Type' CONTENT='text/html; charset=ISO-8859-1'>\r\n"
            "</HEAD>\r\n"
            "<BODY><H1>401 Unauthorized.</H1></BODY>\r\n"
            "</HTML>\r\n", rfc1123Time().c_str(), FormatFullVersion().c_str());
    // Map the status code to its standard reason phrase.
    const char *cStatus;
    if (nStatus == HTTP_OK) cStatus = "OK";
    else if (nStatus == HTTP_BAD_REQUEST) cStatus = "Bad Request";
    else if (nStatus == HTTP_FORBIDDEN) cStatus = "Forbidden";
    else if (nStatus == HTTP_NOT_FOUND) cStatus = "Not Found";
    else if (nStatus == HTTP_INTERNAL_SERVER_ERROR) cStatus = "Internal Server Error";
    else cStatus = "";
    // PRIszu is a portability macro for the size_t format specifier.
    return strprintf(
            "HTTP/1.1 %d %s\r\n"
            "Date: %s\r\n"
            "Connection: %s\r\n"
            "Content-Length: %"PRIszu"\r\n"
            "Content-Type: application/json\r\n"
            "Server: bitcoin-json-rpc/%s\r\n"
            "\r\n"
            "%s",
        nStatus,
        cStatus,
        rfc1123Time().c_str(),
        keepalive ? "keep-alive" : "close",
        strMsg.size(),
        FormatFullVersion().c_str(),
        strMsg.c_str());
}
bool ReadHTTPRequestLine(std::basic_istream<char>& stream, int &proto,
string& http_method, string& http_uri)
{
string str;
getline(stream, str);
// HTTP request line is space-delimited
vector<string> vWords;
boost::split(vWords, str, boost::is_any_of(" "));
if (vWords.size() < 2)
return false;
// HTTP methods permitted: GET, POST
http_method = vWords[0];
if (http_method != "GET" && http_method != "POST")
return false;
// HTTP URI must be an absolute path, relative to current host
http_uri = vWords[1];
if (http_uri.size() == 0 || http_uri[0] != '/')
return false;
// parse proto, if present
string strProto = "";
if (vWords.size() > 2)
strProto = vWords[2];
proto = 0;
const char *ver = strstr(strProto.c_str(), "HTTP/1.");
if (ver != NULL)
proto = atoi(ver+7);
return true;
}
int ReadHTTPStatus(std::basic_istream<char>& stream, int &proto)
{
string str;
getline(stream, str);
vector<string> vWords;
boost::split(vWords, str, boost::is_any_of(" "));
if (vWords.size() < 2)
return HTTP_INTERNAL_SERVER_ERROR;
proto = 0;
const char *ver = strstr(str.c_str(), "HTTP/1.");
if (ver != NULL)
proto = atoi(ver+7);
return atoi(vWords[1].c_str());
}
int ReadHTTPHeaders(std::basic_istream<char>& stream, map<string, string>& mapHeadersRet)
{
int nLen = 0;
while (true)
{
string str;
std::getline(stream, str);
if (str.empty() || str == "\r")
break;
string::size_type nColon = str.find(":");
if (nColon != string::npos)
{
string strHeader = str.substr(0, nColon);
boost::trim(strHeader);
boost::to_lower(strHeader);
string strValue = str.substr(nColon+1);
boost::trim(strValue);
mapHeadersRet[strHeader] = strValue;
if (strHeader == "content-length")
nLen = atoi(strValue.c_str());
}
}
return nLen;
}
// Read the headers and Content-Length-delimited body of an HTTP message.
// Returns 500 when the advertised length is negative or exceeds MAX_SIZE,
// otherwise 200. Also normalizes the "connection" header according to the
// HTTP version when the peer did not state it explicitly.
int ReadHTTPMessage(std::basic_istream<char>& stream, map<string,
                    string>& mapHeadersRet, string& strMessageRet,
                    int nProto)
{
    mapHeadersRet.clear();
    strMessageRet = "";
    // Read header
    int nLen = ReadHTTPHeaders(stream, mapHeadersRet);
    if (nLen < 0 || nLen > (int)MAX_SIZE)
        return HTTP_INTERNAL_SERVER_ERROR;
    // Read message
    if (nLen > 0)
    {
        // NOTE(review): a short read is not detected here; vch is
        // zero-initialized, so a truncated body arrives NUL-padded.
        vector<char> vch(nLen);
        stream.read(&vch[0], nLen);
        strMessageRet = string(vch.begin(), vch.end());
    }
    // HTTP/1.1 defaults to keep-alive, HTTP/1.0 to close.
    string sConHdr = mapHeadersRet["connection"];
    if ((sConHdr != "close") && (sConHdr != "keep-alive"))
    {
        if (nProto >= 1)
            mapHeadersRet["connection"] = "keep-alive";
        else
            mapHeadersRet["connection"] = "close";
    }
    return HTTP_OK;
}
// Validate an HTTP Basic "Authorization" header against the configured
// rpcuser:rpcpassword, using a timing-resistant comparison.
bool HTTPAuthorized(map<string, string>& mapHeaders)
{
    string strAuth = mapHeaders["authorization"];
    if (strAuth.substr(0, 6) != "Basic ")
        return false;
    string strEncoded = strAuth.substr(6);
    boost::trim(strEncoded);
    return TimingResistantEqual(DecodeBase64(strEncoded), strRPCUserColonPass);
}
//
// JSON-RPC protocol. Bitcoin speaks version 1.0 for maximum compatibility,
// but uses JSON-RPC 1.1/2.0 standards for parts of the 1.0 standard that were
// unspecified (HTTP errors and contents of 'error').
//
// 1.0 spec: http://json-rpc.org/wiki/specification
// 1.2 spec: http://groups.google.com/group/json-rpc/web/json-rpc-over-http
// http://www.codeproject.com/KB/recipes/JSON_Spirit.aspx
//
// Serialize a JSON-RPC 1.0 request object, newline-terminated.
string JSONRPCRequest(const string& strMethod, const Array& params, const Value& id)
{
    Object request;
    request.push_back(Pair("method", strMethod));
    request.push_back(Pair("params", params));
    request.push_back(Pair("id", id));
    const string strBody = write_string(Value(request), false);
    return strBody + "\n";
}
// Build a JSON-RPC reply object. Per convention, the result field is
// forced to null whenever an error is present.
Object JSONRPCReplyObj(const Value& result, const Value& error, const Value& id)
{
    Object reply;
    if (error.type() == null_type)
        reply.push_back(Pair("result", result));
    else
        reply.push_back(Pair("result", Value::null));
    reply.push_back(Pair("error", error));
    reply.push_back(Pair("id", id));
    return reply;
}
// Serialize a full JSON-RPC reply, newline-terminated.
string JSONRPCReply(const Value& result, const Value& error, const Value& id)
{
    return write_string(Value(JSONRPCReplyObj(result, error, id)), false) + "\n";
}
// Map a JSON-RPC error object onto an HTTP status code and write the
// corresponding (non-keepalive) HTTP reply to the stream.
void ErrorReply(std::ostream& stream, const Object& objError, const Value& id)
{
    int code = find_value(objError, "code").get_int();
    int nStatus = HTTP_INTERNAL_SERVER_ERROR;
    if (code == RPC_INVALID_REQUEST)
        nStatus = HTTP_BAD_REQUEST;
    else if (code == RPC_METHOD_NOT_FOUND)
        nStatus = HTTP_NOT_FOUND;
    string strReply = JSONRPCReply(Value::null, objError, id);
    stream << HTTPReply(nStatus, strReply, false) << std::flush;
}
// Decide whether a client at the given address may use the RPC server:
// loopback is always allowed, everything else must match -rpcallowip.
bool ClientAllowed(const boost::asio::ip::address& address)
{
    // Normalize IPv4-compatible/-mapped IPv6 addresses to plain IPv4 first,
    // so the checks below only see native forms.
    if (address.is_v6()
     && (address.to_v6().is_v4_compatible() || address.to_v6().is_v4_mapped()))
        return ClientAllowed(address.to_v6().to_v4());
    // Loopback: ::1 or anything in 127.0.0.0/8.
    if (address == asio::ip::address_v4::loopback()
     || address == asio::ip::address_v6::loopback()
     || (address.is_v4() && (address.to_v4().to_ulong() & 0xff000000) == 0x7f000000))
        return true;
    // Otherwise the textual address must match an -rpcallowip pattern.
    const string strAddress = address.to_string();
    const vector<string>& vAllow = mapMultiArgs["-rpcallowip"];
    BOOST_FOREACH(string strAllow, vAllow)
        if (WildcardMatch(strAddress, strAllow))
            return true;
    return false;
}
//
// IOStream device that speaks SSL but can also speak non-SSL
//
// Boost.Iostreams bidirectional device over an asio SSL stream. When
// fUseSSL is false, all I/O bypasses the TLS layer and talks to the raw
// socket via next_layer(). The TLS handshake is performed lazily: in the
// server role on first read, in the client role on first write.
template <typename Protocol>
class SSLIOStreamDevice : public iostreams::device<iostreams::bidirectional> {
public:
    SSLIOStreamDevice(asio::ssl::stream<typename Protocol::socket> &streamIn, bool fUseSSLIn) : stream(streamIn)
    {
        fUseSSL = fUseSSLIn;
        fNeedHandshake = fUseSSLIn; // only TLS connections need a handshake
    }
    // Perform the TLS handshake once in the given role; no-op afterwards.
    void handshake(ssl::stream_base::handshake_type role)
    {
        if (!fNeedHandshake) return;
        fNeedHandshake = false;
        stream.handshake(role);
    }
    std::streamsize read(char* s, std::streamsize n)
    {
        handshake(ssl::stream_base::server); // HTTPS servers read first
        if (fUseSSL) return stream.read_some(asio::buffer(s, n));
        return stream.next_layer().read_some(asio::buffer(s, n));
    }
    std::streamsize write(const char* s, std::streamsize n)
    {
        handshake(ssl::stream_base::client); // HTTPS clients write first
        if (fUseSSL) return asio::write(stream, asio::buffer(s, n));
        return asio::write(stream.next_layer(), asio::buffer(s, n));
    }
    // Resolve server:port and connect the raw socket, trying each resolved
    // endpoint in turn. Returns false when none could be reached.
    bool connect(const std::string& server, const std::string& port)
    {
        ip::tcp::resolver resolver(stream.get_io_service());
        ip::tcp::resolver::query query(server.c_str(), port.c_str());
        ip::tcp::resolver::iterator endpoint_iterator = resolver.resolve(query);
        ip::tcp::resolver::iterator end;
        boost::system::error_code error = asio::error::host_not_found;
        while (error && endpoint_iterator != end)
        {
            stream.lowest_layer().close();
            stream.lowest_layer().connect(*endpoint_iterator++, error);
        }
        if (error)
            return false;
        return true;
    }
private:
    bool fNeedHandshake; // true until the lazy handshake has run
    bool fUseSSL;        // false: bypass TLS and use the raw socket
    asio::ssl::stream<typename Protocol::socket>& stream;
};
// Abstract handle for one accepted RPC client connection: an iostream to
// talk over, the peer address for logging, and a way to close it.
class AcceptedConnection
{
public:
    virtual ~AcceptedConnection() {}
    virtual std::iostream& stream() = 0;
    virtual std::string peer_address_to_string() const = 0;
    virtual void close() = 0;
};
// Concrete AcceptedConnection backed by an asio SSL stream. 'peer' is
// filled in by the acceptor; _stream layers an SSLIOStreamDevice on top of
// the socket so callers get a plain std::iostream.
template <typename Protocol>
class AcceptedConnectionImpl : public AcceptedConnection
{
public:
    AcceptedConnectionImpl(
            asio::io_service& io_service,
            ssl::context &context,
            bool fUseSSL) :
        sslStream(io_service, context),
        _d(sslStream, fUseSSL),
        _stream(_d)
    {
    }
    virtual std::iostream& stream()
    {
        return _stream;
    }
    virtual std::string peer_address_to_string() const
    {
        return peer.address().to_string();
    }
    virtual void close()
    {
        _stream.close();
    }
    typename Protocol::endpoint peer; // remote endpoint, set by async_accept
    asio::ssl::stream<typename Protocol::socket> sslStream;
private:
    SSLIOStreamDevice<Protocol> _d;
    iostreams::stream< SSLIOStreamDevice<Protocol> > _stream;
};
void ServiceConnection(AcceptedConnection *conn);
// Forward declaration required for RPCListen
template <typename Protocol, typename SocketAcceptorService>
static void RPCAcceptHandler(boost::shared_ptr< basic_socket_acceptor<Protocol, SocketAcceptorService> > acceptor,
ssl::context& context,
bool fUseSSL,
AcceptedConnection* conn,
const boost::system::error_code& error);
/**
 * Sets up I/O resources to accept and handle a new connection.
 *
 * The connection object is allocated up front and its ownership is handed
 * to RPCAcceptHandler via the async_accept completion callback, which is
 * responsible for deleting it on every path.
 */
template <typename Protocol, typename SocketAcceptorService>
static void RPCListen(boost::shared_ptr< basic_socket_acceptor<Protocol, SocketAcceptorService> > acceptor,
                      ssl::context& context,
                      const bool fUseSSL)
{
    // Accept connection
    AcceptedConnectionImpl<Protocol>* conn = new AcceptedConnectionImpl<Protocol>(acceptor->get_io_service(), context, fUseSSL);
    acceptor->async_accept(
            conn->sslStream.lowest_layer(),
            conn->peer,
            boost::bind(&RPCAcceptHandler<Protocol, SocketAcceptorService>,
                acceptor,
                boost::ref(context),
                fUseSSL,
                conn,
                boost::asio::placeholders::error));
}
/**
 * Accept and handle incoming connection.
 *
 * Completion handler for the async_accept issued by RPCListen. Owns
 * 'conn' and deletes it on every path: accept error, rejected client,
 * or after the connection has been fully serviced.
 */
template <typename Protocol, typename SocketAcceptorService>
static void RPCAcceptHandler(boost::shared_ptr< basic_socket_acceptor<Protocol, SocketAcceptorService> > acceptor,
                             ssl::context& context,
                             const bool fUseSSL,
                             AcceptedConnection* conn,
                             const boost::system::error_code& error)
{
    // Immediately start accepting new connections, except when we're cancelled or our socket is closed.
    if (error != asio::error::operation_aborted && acceptor->is_open())
        RPCListen(acceptor, context, fUseSSL);
    // Downcast to reach the TCP peer endpoint for the IP filter below.
    AcceptedConnectionImpl<ip::tcp>* tcp_conn = dynamic_cast< AcceptedConnectionImpl<ip::tcp>* >(conn);
    // TODO: Actually handle errors
    if (error)
    {
        delete conn;
    }
    // Restrict callers by IP. It is important to
    // do this before starting client thread, to filter out
    // certain DoS and misbehaving clients.
    else if (tcp_conn && !ClientAllowed(tcp_conn->peer.address()))
    {
        // Only send a 403 if we're not using SSL to prevent a DoS during the SSL handshake.
        if (!fUseSSL)
            conn->stream() << HTTPReply(HTTP_FORBIDDEN, "", false) << std::flush;
        delete conn;
    }
    else {
        // Serve the client synchronously on this io_service worker thread.
        ServiceConnection(conn);
        conn->close();
        delete conn;
    }
}
// Configure and launch the RPC server: validate credentials, set up the
// (optionally SSL) listening socket(s) and spin up the worker thread pool.
// On any fatal problem this reports the error and initiates shutdown
// rather than throwing.
void StartRPCThreads()
{
    strRPCUserColonPass = mapArgs["-rpcuser"] + ":" + mapArgs["-rpcpassword"];
    // Refuse to serve with a missing password or user==password, unless the
    // active network params waive the requirement.
    if (((mapArgs["-rpcpassword"] == "") ||
         (mapArgs["-rpcuser"] == mapArgs["-rpcpassword"])) && Params().RequireRPCPassword())
    {
        // Suggest a freshly generated random password in the error message.
        unsigned char rand_pwd[32];
        RAND_bytes(rand_pwd, 32);
        string strWhatAmI = "To use bitcoind";
        if (mapArgs.count("-server"))
            strWhatAmI = strprintf(_("To use the %s option"), "\"-server\"");
        else if (mapArgs.count("-daemon"))
            strWhatAmI = strprintf(_("To use the %s option"), "\"-daemon\"");
        uiInterface.ThreadSafeMessageBox(strprintf(
            _("%s, you must set a rpcpassword in the configuration file:\n"
              "%s\n"
              "It is recommended you use the following random password:\n"
              "rpcuser=bitcoinrpc\n"
              "rpcpassword=%s\n"
              "(you do not need to remember this password)\n"
              "The username and password MUST NOT be the same.\n"
              "If the file does not exist, create it with owner-readable-only file permissions.\n"
              "It is also recommended to set alertnotify so you are notified of problems;\n"
              "for example: alertnotify=echo %%s | mail -s \"Bitcoin Alert\" admin@foo.com\n"),
                strWhatAmI.c_str(),
                GetConfigFile().string().c_str(),
                EncodeBase58(&rand_pwd[0],&rand_pwd[0]+32).c_str()),
            "", CClientUIInterface::MSG_ERROR);
        StartShutdown();
        return;
    }
    assert(rpc_io_service == NULL);
    rpc_io_service = new asio::io_service();
    rpc_ssl_context = new ssl::context(*rpc_io_service, ssl::context::sslv23);
    const bool fUseSSL = GetBoolArg("-rpcssl", false);
    if (fUseSSL)
    {
        // Load the certificate chain and private key; relative paths are
        // resolved against the data directory. SSLv2 is always disabled.
        rpc_ssl_context->set_options(ssl::context::no_sslv2);
        filesystem::path pathCertFile(GetArg("-rpcsslcertificatechainfile", "server.cert"));
        if (!pathCertFile.is_complete()) pathCertFile = filesystem::path(GetDataDir()) / pathCertFile;
        if (filesystem::exists(pathCertFile)) rpc_ssl_context->use_certificate_chain_file(pathCertFile.string());
        else LogPrintf("ThreadRPCServer ERROR: missing server certificate file %s\n", pathCertFile.string().c_str());
        filesystem::path pathPKFile(GetArg("-rpcsslprivatekeyfile", "server.pem"));
        if (!pathPKFile.is_complete()) pathPKFile = filesystem::path(GetDataDir()) / pathPKFile;
        if (filesystem::exists(pathPKFile)) rpc_ssl_context->use_private_key_file(pathPKFile.string(), ssl::context::pem);
        else LogPrintf("ThreadRPCServer ERROR: missing server private key file %s\n", pathPKFile.string().c_str());
        string strCiphers = GetArg("-rpcsslciphers", "TLSv1.2+HIGH:TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!3DES:@STRENGTH");
        SSL_CTX_set_cipher_list(rpc_ssl_context->impl(), strCiphers.c_str());
    }
    // Try a dual IPv6/IPv4 socket, falling back to separate IPv4 and IPv6 sockets
    // Without -rpcallowip, bind loopback only; otherwise bind the wildcard.
    const bool loopback = !mapArgs.count("-rpcallowip");
    asio::ip::address bindAddress = loopback ? asio::ip::address_v6::loopback() : asio::ip::address_v6::any();
    ip::tcp::endpoint endpoint(bindAddress, GetArg("-rpcport", Params().RPCPort()));
    boost::system::error_code v6_only_error;
    boost::shared_ptr<ip::tcp::acceptor> acceptor(new ip::tcp::acceptor(*rpc_io_service));
    bool fListening = false;
    std::string strerr;
    try
    {
        acceptor->open(endpoint.protocol());
        acceptor->set_option(boost::asio::ip::tcp::acceptor::reuse_address(true));
        // Try making the socket dual IPv6/IPv4 (if listening on the "any" address)
        acceptor->set_option(boost::asio::ip::v6_only(loopback), v6_only_error);
        acceptor->bind(endpoint);
        acceptor->listen(socket_base::max_connections);
        RPCListen(acceptor, *rpc_ssl_context, fUseSSL);
        fListening = true;
    }
    catch(boost::system::system_error &e)
    {
        strerr = strprintf(_("An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s"), endpoint.port(), e.what());
    }
    try {
        // If dual IPv6/IPv4 failed (or we're opening loopback interfaces only), open IPv4 separately
        if (!fListening || loopback || v6_only_error)
        {
            bindAddress = loopback ? asio::ip::address_v4::loopback() : asio::ip::address_v4::any();
            endpoint.address(bindAddress);
            acceptor.reset(new ip::tcp::acceptor(*rpc_io_service));
            acceptor->open(endpoint.protocol());
            acceptor->set_option(boost::asio::ip::tcp::acceptor::reuse_address(true));
            acceptor->bind(endpoint);
            acceptor->listen(socket_base::max_connections);
            RPCListen(acceptor, *rpc_ssl_context, fUseSSL);
            fListening = true;
        }
    }
    catch(boost::system::system_error &e)
    {
        strerr = strprintf(_("An error occurred while setting up the RPC port %u for listening on IPv4: %s"), endpoint.port(), e.what());
    }
    if (!fListening) {
        // Neither socket could be opened: report the last error and shut down.
        uiInterface.ThreadSafeMessageBox(strerr, "", CClientUIInterface::MSG_ERROR);
        StartShutdown();
        return;
    }
    // Run the io_service on a pool of -rpcthreads worker threads.
    rpc_worker_group = new boost::thread_group();
    for (int i = 0; i < GetArg("-rpcthreads", 4); i++)
        rpc_worker_group->create_thread(boost::bind(&asio::io_service::run, rpc_io_service));
}
// Tear down everything StartRPCThreads created. Safe to call when RPC was
// never started. Order matters: cancel pending timers, stop the
// io_service so workers drain, join them, then free the resources.
void StopRPCThreads()
{
    if (rpc_io_service == NULL) return;
    deadlineTimers.clear();
    rpc_io_service->stop();
    if (rpc_worker_group != NULL)
        rpc_worker_group->join_all();
    delete rpc_worker_group; rpc_worker_group = NULL;
    delete rpc_ssl_context; rpc_ssl_context = NULL;
    delete rpc_io_service; rpc_io_service = NULL;
}
// Deadline-timer callback: run func unless the timer was cancelled or
// completed with an error.
void RPCRunHandler(const boost::system::error_code& err, boost::function<void(void)> func)
{
    if (err)
        return;
    func();
}
void RPCRunLater(const std::string& name, boost::function<void(void)> func, int64_t nSeconds)
{
assert(rpc_io_service != NULL);
if (deadlineTimers.count(name) == 0)
{
deadlineTimers.insert(make_pair(name,
boost::shared_ptr<deadline_timer>(new deadline_timer(*rpc_io_service))));
}
deadlineTimers[name]->expires_from_now(posix_time::seconds(nSeconds));
deadlineTimers[name]->async_wait(boost::bind(RPCRunHandler, _1, func));
}
// A single decoded JSON-RPC request: id, method name and parameters.
class JSONRequest
{
public:
    Value id;         // echoed back in the reply (null when absent)
    string strMethod; // RPC method name
    Array params;     // positional parameters
    JSONRequest() { id = Value::null; }
    void parse(const Value& valRequest);
};
// Decode a single JSON-RPC request object into id/method/params, throwing
// RPC_INVALID_REQUEST on structural problems.
void JSONRequest::parse(const Value& valRequest)
{
    // Parse request
    if (valRequest.type() != obj_type)
        throw JSONRPCError(RPC_INVALID_REQUEST, "Invalid Request object");
    const Object& request = valRequest.get_obj();
    // Parse id now so errors from here on will have the id
    id = find_value(request, "id");
    // Parse method
    Value valMethod = find_value(request, "method");
    if (valMethod.type() == null_type)
        throw JSONRPCError(RPC_INVALID_REQUEST, "Missing method");
    if (valMethod.type() != str_type)
        throw JSONRPCError(RPC_INVALID_REQUEST, "Method must be a string");
    strMethod = valMethod.get_str();
    // getwork/getblocktemplate are polled constantly; keep them out of the log.
    if (strMethod != "getwork" && strMethod != "getblocktemplate")
        LogPrint("rpc", "ThreadRPCServer method=%s\n", strMethod.c_str());
    // Parse params: an array, or null meaning "no parameters".
    Value valParams = find_value(request, "params");
    if (valParams.type() == array_type)
        params = valParams.get_array();
    else if (valParams.type() == null_type)
        params = Array();
    else
        throw JSONRPCError(RPC_INVALID_REQUEST, "Params must be an array");
}
// Execute one request of a batch, converting any failure into a JSON-RPC
// error reply object instead of letting an exception escape.
static Object JSONRPCExecOne(const Value& req)
{
    JSONRequest jreq;
    Object rpc_result;
    try {
        jreq.parse(req);
        Value result = tableRPC.execute(jreq.strMethod, jreq.params);
        rpc_result = JSONRPCReplyObj(result, Value::null, jreq.id);
    }
    catch (Object& objError)
    {
        // RPC-level failures arrive as ready-made error objects.
        rpc_result = JSONRPCReplyObj(Value::null, objError, jreq.id);
    }
    catch (std::exception& e)
    {
        // Anything else is reported as a parse error.
        rpc_result = JSONRPCReplyObj(Value::null,
                                     JSONRPCError(RPC_PARSE_ERROR, e.what()), jreq.id);
    }
    return rpc_result;
}
// Run each sub-request of a batch and serialize the array of replies,
// newline-terminated.
static string JSONRPCExecBatch(const Array& vReq)
{
    Array ret;
    for (Array::const_iterator it = vReq.begin(); it != vReq.end(); ++it)
        ret.push_back(JSONRPCExecOne(*it));
    return write_string(Value(ret), false) + "\n";
}
// Serve a single accepted client: loop reading HTTP requests, enforcing
// authentication, dispatching JSON-RPC calls (single or batch) and writing
// replies, until an error occurs or the peer asks to close.
void ServiceConnection(AcceptedConnection *conn)
{
    bool fRun = true;
    while (fRun)
    {
        int nProto = 0;
        map<string, string> mapHeaders;
        string strRequest, strMethod, strURI;
        // Read HTTP request line
        if (!ReadHTTPRequestLine(conn->stream(), nProto, strMethod, strURI))
            break;
        // Read HTTP message headers and body
        ReadHTTPMessage(conn->stream(), mapHeaders, strRequest, nProto);
        // Only the root path is served.
        if (strURI != "/") {
            conn->stream() << HTTPReply(HTTP_NOT_FOUND, "", false) << std::flush;
            break;
        }
        // Check authorization
        if (mapHeaders.count("authorization") == 0)
        {
            conn->stream() << HTTPReply(HTTP_UNAUTHORIZED, "", false) << std::flush;
            break;
        }
        if (!HTTPAuthorized(mapHeaders))
        {
            LogPrintf("ThreadRPCServer incorrect password attempt from %s\n", conn->peer_address_to_string().c_str());
            /* Deter brute-forcing short passwords.
               If this results in a DoS the user really
               shouldn't have their RPC port exposed. */
            if (mapArgs["-rpcpassword"].size() < 20)
                MilliSleep(250);
            conn->stream() << HTTPReply(HTTP_UNAUTHORIZED, "", false) << std::flush;
            break;
        }
        // Honor an explicit "Connection: close" after this request.
        if (mapHeaders["connection"] == "close")
            fRun = false;
        JSONRequest jreq;
        try
        {
            // Parse request
            Value valRequest;
            if (!read_string(strRequest, valRequest))
                throw JSONRPCError(RPC_PARSE_ERROR, "Parse error");
            string strReply;
            // singleton request
            if (valRequest.type() == obj_type) {
                jreq.parse(valRequest);
                Value result = tableRPC.execute(jreq.strMethod, jreq.params);
                // Send reply
                strReply = JSONRPCReply(result, Value::null, jreq.id);
            // array of requests
            } else if (valRequest.type() == array_type)
                strReply = JSONRPCExecBatch(valRequest.get_array());
            else
                throw JSONRPCError(RPC_PARSE_ERROR, "Top-level object parse error");
            conn->stream() << HTTPReply(HTTP_OK, strReply, fRun) << std::flush;
        }
        catch (Object& objError)
        {
            // RPC error object: send the error reply, then drop the connection.
            ErrorReply(conn->stream(), objError, jreq.id);
            break;
        }
        catch (std::exception& e)
        {
            ErrorReply(conn->stream(), JSONRPCError(RPC_PARSE_ERROR, e.what()), jreq.id);
            break;
        }
    }
}
// Dispatch a parsed JSON-RPC call: look up the handler, enforce wallet and
// safe-mode restrictions, take the appropriate locks, and run it. Handler
// exceptions are normalized to JSON-RPC error objects.
json_spirit::Value CRPCTable::execute(const std::string &strMethod, const json_spirit::Array &params) const
{
    // Find method
    const CRPCCommand *pcmd = tableRPC[strMethod];
    if (!pcmd)
        throw JSONRPCError(RPC_METHOD_NOT_FOUND, "Method not found");
    if (pcmd->reqWallet && !pwalletMain)
        throw JSONRPCError(RPC_METHOD_NOT_FOUND, "Method not found (disabled)");
    // Observe safe mode
    string strWarning = GetWarnings("rpc");
    if (strWarning != "" && !GetBoolArg("-disablesafemode", false) &&
        !pcmd->okSafeMode)
        throw JSONRPCError(RPC_FORBIDDEN_BY_SAFE_MODE, string("Safe mode: ") + strWarning);
    try
    {
        // Execute
        Value result;
        {
            if (pcmd->threadSafe)
                // Marked thread-safe: run without any global locks.
                result = pcmd->actor(params, false);
            else if (!pwalletMain) {
                LOCK(cs_main);
                result = pcmd->actor(params, false);
            } else {
                // Lock order is always cs_main before cs_wallet.
                LOCK2(cs_main, pwalletMain->cs_wallet);
                result = pcmd->actor(params, false);
            }
        }
        return result;
    }
    catch (std::exception& e)
    {
        // Wrap any handler exception as a generic RPC error.
        throw JSONRPCError(RPC_MISC_ERROR, e.what());
    }
}
// Client side: connect to the local RPC server, POST a JSON-RPC call for
// strMethod/params, and return the decoded reply object. Throws
// runtime_error on configuration, connection, HTTP or parse failures.
Object CallRPC(const string& strMethod, const Array& params)
{
    if (mapArgs["-rpcuser"] == "" && mapArgs["-rpcpassword"] == "")
        throw runtime_error(strprintf(
            _("You must set rpcpassword=<password> in the configuration file:\n%s\n"
              "If the file does not exist, create it with owner-readable-only file permissions."),
                GetConfigFile().string().c_str()));
    // Connect to localhost
    bool fUseSSL = GetBoolArg("-rpcssl", false);
    asio::io_service io_service;
    ssl::context context(io_service, ssl::context::sslv23);
    context.set_options(ssl::context::no_sslv2);
    asio::ssl::stream<asio::ip::tcp::socket> sslStream(io_service, context);
    SSLIOStreamDevice<asio::ip::tcp> d(sslStream, fUseSSL);
    iostreams::stream< SSLIOStreamDevice<asio::ip::tcp> > stream(d);
    bool fWait = GetBoolArg("-rpcwait", false); // -rpcwait means try until server has started
    do {
        bool fConnected = d.connect(GetArg("-rpcconnect", "127.0.0.1"), GetArg("-rpcport", itostr(Params().RPCPort())));
        if (fConnected) break;
        if (fWait)
            MilliSleep(1000); // retry once a second until the server is up
        else
            throw runtime_error("couldn't connect to server");
    } while (fWait);
    // HTTP basic authentication
    string strUserPass64 = EncodeBase64(mapArgs["-rpcuser"] + ":" + mapArgs["-rpcpassword"]);
    map<string, string> mapRequestHeaders;
    mapRequestHeaders["Authorization"] = string("Basic ") + strUserPass64;
    // Send request
    string strRequest = JSONRPCRequest(strMethod, params, 1);
    string strPost = HTTPPost(strRequest, mapRequestHeaders);
    stream << strPost << std::flush;
    // Receive HTTP reply status
    int nProto = 0;
    int nStatus = ReadHTTPStatus(stream, nProto);
    // Receive HTTP reply message headers and body
    map<string, string> mapHeaders;
    string strReply;
    ReadHTTPMessage(stream, mapHeaders, strReply, nProto);
    // 400/404/500 carry a JSON error body and are handled by the caller.
    if (nStatus == HTTP_UNAUTHORIZED)
        throw runtime_error("incorrect rpcuser or rpcpassword (authorization failed)");
    else if (nStatus >= 400 && nStatus != HTTP_BAD_REQUEST && nStatus != HTTP_NOT_FOUND && nStatus != HTTP_INTERNAL_SERVER_ERROR)
        throw runtime_error(strprintf("server returned HTTP error %d", nStatus));
    else if (strReply.empty())
        throw runtime_error("no response from server");
    // Parse reply
    Value valReply;
    if (!read_string(strReply, valReply))
        throw runtime_error("couldn't parse reply from server");
    const Object& reply = valReply.get_obj();
    if (reply.empty())
        throw runtime_error("expected reply to have result, error and id properties");
    return reply;
}
// Coerce a JSON value to type T. String values are re-parsed as raw JSON
// first (the command line passes everything as strings); null values are
// passed through untouched when fAllowNull is set.
template<typename T>
void ConvertTo(Value& value, bool fAllowNull=false)
{
    if (fAllowNull && value.type() == null_type)
        return;
    if (value.type() != str_type)
    {
        value = value.get_value<T>();
        return;
    }
    // reinterpret string as unquoted json value
    string strJSON = value.get_str();
    Value valParsed;
    if (!read_string(strJSON, valParsed))
        throw runtime_error(string("Error parsing JSON:")+strJSON);
    ConvertTo<T>(valParsed, fAllowNull);
    value = valParsed;
}
// Convert strings to command-specific RPC representation.
// Every command-line argument starts life as a JSON string; for methods
// whose parameters are typed (bool / int64 / double / Array / Object),
// the corresponding positional entry is re-parsed via ConvertTo<>() so
// the server receives properly typed JSON values. Unlisted parameters
// stay strings. Throws runtime_error on unparseable values.
Array RPCConvertValues(const std::string &strMethod, const std::vector<std::string> &strParams)
{
    Array params;
    // Bug fix: the declaration previously read "const std::string ¶m" —
    // an HTML-entity corruption of "&param" ("&para;" + "m") that does not
    // compile. Restored the reference parameter named `param`.
    BOOST_FOREACH(const std::string &param, strParams)
        params.push_back(param);

    int n = params.size();

    //
    // Special case non-string parameter types
    //
    if (strMethod == "stop" && n > 0) ConvertTo<bool>(params[0]);
    if (strMethod == "getaddednodeinfo" && n > 0) ConvertTo<bool>(params[0]);
    if (strMethod == "setgenerate" && n > 0) ConvertTo<bool>(params[0]);
    if (strMethod == "setgenerate" && n > 1) ConvertTo<boost::int64_t>(params[1]);
    if (strMethod == "getnetworkhashps" && n > 0) ConvertTo<boost::int64_t>(params[0]);
    if (strMethod == "getnetworkhashps" && n > 1) ConvertTo<boost::int64_t>(params[1]);
    if (strMethod == "sendtoaddress" && n > 1) ConvertTo<double>(params[1]);
    if (strMethod == "settxfee" && n > 0) ConvertTo<double>(params[0]);
    if (strMethod == "getreceivedbyaddress" && n > 1) ConvertTo<boost::int64_t>(params[1]);
    if (strMethod == "getreceivedbyaccount" && n > 1) ConvertTo<boost::int64_t>(params[1]);
    if (strMethod == "listreceivedbyaddress" && n > 0) ConvertTo<boost::int64_t>(params[0]);
    if (strMethod == "listreceivedbyaddress" && n > 1) ConvertTo<bool>(params[1]);
    if (strMethod == "listreceivedbyaccount" && n > 0) ConvertTo<boost::int64_t>(params[0]);
    if (strMethod == "listreceivedbyaccount" && n > 1) ConvertTo<bool>(params[1]);
    if (strMethod == "getbalance" && n > 1) ConvertTo<boost::int64_t>(params[1]);
    if (strMethod == "getblockhash" && n > 0) ConvertTo<boost::int64_t>(params[0]);
    if (strMethod == "move" && n > 2) ConvertTo<double>(params[2]);
    if (strMethod == "move" && n > 3) ConvertTo<boost::int64_t>(params[3]);
    if (strMethod == "sendfrom" && n > 2) ConvertTo<double>(params[2]);
    if (strMethod == "sendfrom" && n > 3) ConvertTo<boost::int64_t>(params[3]);
    if (strMethod == "listtransactions" && n > 1) ConvertTo<boost::int64_t>(params[1]);
    if (strMethod == "listtransactions" && n > 2) ConvertTo<boost::int64_t>(params[2]);
    if (strMethod == "listaccounts" && n > 0) ConvertTo<boost::int64_t>(params[0]);
    if (strMethod == "walletpassphrase" && n > 1) ConvertTo<boost::int64_t>(params[1]);
    if (strMethod == "getblocktemplate" && n > 0) ConvertTo<Object>(params[0]);
    if (strMethod == "listsinceblock" && n > 1) ConvertTo<boost::int64_t>(params[1]);
    if (strMethod == "sendmany" && n > 1) ConvertTo<Object>(params[1]);
    if (strMethod == "sendmany" && n > 2) ConvertTo<boost::int64_t>(params[2]);
    if (strMethod == "addmultisigaddress" && n > 0) ConvertTo<boost::int64_t>(params[0]);
    if (strMethod == "addmultisigaddress" && n > 1) ConvertTo<Array>(params[1]);
    if (strMethod == "createmultisig" && n > 0) ConvertTo<boost::int64_t>(params[0]);
    if (strMethod == "createmultisig" && n > 1) ConvertTo<Array>(params[1]);
    if (strMethod == "listunspent" && n > 0) ConvertTo<boost::int64_t>(params[0]);
    if (strMethod == "listunspent" && n > 1) ConvertTo<boost::int64_t>(params[1]);
    if (strMethod == "listunspent" && n > 2) ConvertTo<Array>(params[2]);
    if (strMethod == "getblock" && n > 1) ConvertTo<bool>(params[1]);
    if (strMethod == "getrawtransaction" && n > 1) ConvertTo<boost::int64_t>(params[1]);
    if (strMethod == "createrawtransaction" && n > 0) ConvertTo<Array>(params[0]);
    if (strMethod == "createrawtransaction" && n > 1) ConvertTo<Object>(params[1]);
    if (strMethod == "signrawtransaction" && n > 1) ConvertTo<Array>(params[1], true);
    if (strMethod == "signrawtransaction" && n > 2) ConvertTo<Array>(params[2], true);
    if (strMethod == "sendrawtransaction" && n > 1) ConvertTo<bool>(params[1], true);
    if (strMethod == "gettxout" && n > 1) ConvertTo<boost::int64_t>(params[1]);
    if (strMethod == "gettxout" && n > 2) ConvertTo<bool>(params[2]);
    if (strMethod == "lockunspent" && n > 0) ConvertTo<bool>(params[0]);
    if (strMethod == "lockunspent" && n > 1) ConvertTo<Array>(params[1]);
    if (strMethod == "importprivkey" && n > 2) ConvertTo<bool>(params[2]);
    if (strMethod == "verifychain" && n > 0) ConvertTo<boost::int64_t>(params[0]);
    if (strMethod == "verifychain" && n > 1) ConvertTo<boost::int64_t>(params[1]);
    if (strMethod == "keypoolrefill" && n > 0) ConvertTo<boost::int64_t>(params[0]);

    return params;
}
// Command-line RPC client entry point: parse argv, dispatch the call to the
// server via CallRPC(), and print the result to stdout or the error to stderr.
// Returns 0 on success, abs(JSON-RPC error code) on a server-side error, or
// 87 (ERROR_INVALID_PARAMETER) on a local exception.
int CommandLineRPC(int argc, char *argv[])
{
    string strPrint;
    int nRet = 0;
    try
    {
        // Skip switches (leading "-" / "/" style options are not RPC args)
        while (argc > 1 && IsSwitchChar(argv[1][0]))
        {
            argc--;
            argv++;
        }

        // Method
        if (argc < 2)
            throw runtime_error("too few parameters");
        string strMethod = argv[1];

        // Parameters default to strings; RPCConvertValues fixes up the typed ones
        std::vector<std::string> strParams(&argv[2], &argv[argc]);
        Array params = RPCConvertValues(strMethod, strParams);

        // Execute
        Object reply = CallRPC(strMethod, params);

        // Parse reply
        const Value& result = find_value(reply, "result");
        const Value& error  = find_value(reply, "error");

        if (error.type() != null_type)
        {
            // Error: echo the JSON error object and use its code as exit status
            strPrint = "error: " + write_string(error, false);
            int code = find_value(error.get_obj(), "code").get_int();
            nRet = abs(code);
        }
        else
        {
            // Result: strings are printed raw, everything else pretty-printed JSON
            if (result.type() == null_type)
                strPrint = "";
            else if (result.type() == str_type)
                strPrint = result.get_str();
            else
                strPrint = write_string(result, true);
        }
    }
    catch (boost::thread_interrupted) {
        // let thread interruption propagate for clean shutdown
        throw;
    }
    catch (std::exception& e) {
        strPrint = string("error: ") + e.what();
        nRet = 87;
    }
    catch (...) {
        PrintException(NULL, "CommandLineRPC()");
    }

    if (strPrint != "")
    {
        // Errors go to stderr so scripted callers can separate the streams
        fprintf((nRet == 0 ? stdout : stderr), "%s\n", strPrint.c_str());
    }
    return nRet;
}
#ifdef TEST
// Stand-alone test harness (compiled only with -DTEST): run as an RPC
// server when invoked with -server, otherwise forward argv to CommandLineRPC().
int main(int argc, char *argv[])
{
#ifdef _MSC_VER
    // Turn off Microsoft heap dump noise
    _CrtSetReportMode(_CRT_WARN, _CRTDBG_MODE_FILE);
    _CrtSetReportFile(_CRT_WARN, CreateFile("NUL", GENERIC_WRITE, 0, NULL, OPEN_EXISTING, 0, 0));
#endif
    // Unbuffered stdio so output appears immediately when piped
    setbuf(stdin, NULL);
    setbuf(stdout, NULL);
    setbuf(stderr, NULL);

    try
    {
        if (argc >= 2 && string(argv[1]) == "-server")
        {
            LogPrintf("server ready\n");
            ThreadRPCServer(NULL);
        }
        else
        {
            return CommandLineRPC(argc, argv);
        }
    }
    catch (boost::thread_interrupted) {
        // propagate interruption for orderly thread shutdown
        throw;
    }
    catch (std::exception& e) {
        PrintException(&e, "main()");
    } catch (...) {
        PrintException(NULL, "main()");
    }
    return 0;
}
#endif
const CRPCTable tableRPC;
| csae1152/bitcoin | src/bitcoinrpc.cpp | C++ | mit | 51,032 |
require 'test_helper'
# Remote (live-sandbox) tests for the eWAY Rapid gateway.
# Requires :eway_rapid credentials in the local fixtures file.
class RemoteEwayRapidTest < Test::Unit::TestCase
  def setup
    @gateway = EwayRapidGateway.new(fixtures(:eway_rapid))

    @amount = 100
    @failed_amount = -100 # negative total is rejected by the gateway
    @credit_card = credit_card('4444333322221111')

    @options = {
      order_id: '1',
      invoice: 'I1234',
      billing_address: address,
      description: 'Store Purchase',
      redirect_url: 'http://bogus.com'
    }
  end

  def test_successful_purchase
    response = @gateway.purchase(@amount, @credit_card, @options)
    assert_success response
    assert_equal 'Transaction Approved Successful', response.message
  end

  def test_fully_loaded_purchase
    response = @gateway.purchase(@amount, @credit_card,
      redirect_url: 'http://awesomesauce.com',
      ip: '0.0.0.0',
      application_id: 'Woohoo',
      partner_id: 'Woohoo',
      transaction_type: 'Purchase',
      description: 'Description',
      order_id: 'orderid1',
      invoice: 'I1234',
      currency: 'AUD',
      email: 'jim@example.com',
      billing_address: {
        title: 'Mr.',
        name: 'Jim Awesome Smith',
        company: 'Awesome Co',
        address1: '1234 My Street',
        address2: 'Apt 1',
        city: 'Ottawa',
        state: 'ON',
        zip: 'K1C2N6',
        country: 'CA',
        phone: '(555)555-5555',
        fax: '(555)555-6666'
      },
      shipping_address: {
        title: 'Ms.',
        name: 'Baker',
        company: 'Elsewhere Inc.',
        address1: '4321 Their St.',
        address2: 'Apt 2',
        city: 'Chicago',
        state: 'IL',
        zip: '60625',
        country: 'US',
        phone: '1115555555',
        fax: '1115556666'
      }
    )
    assert_success response
  end

  # The gateway truncates over-long fields rather than failing; these inputs
  # all exceed the documented limits on purpose.
  def test_successful_purchase_with_overly_long_fields
    options = {
      order_id: 'OrderId must be less than 50 characters otherwise it fails',
      invoice: 'Max 12 chars',
      description: 'EWay Rapid transactions fail if the description is more than 64 characters.',
      billing_address: {
        address1: 'The Billing Address 1 Cannot Be More Than Fifty Characters.',
        address2: 'The Billing Address 2 Cannot Be More Than Fifty Characters.',
        city: 'TheCityCannotBeMoreThanFiftyCharactersOrItAllFallsApart',
      },
      shipping_address: {
        address1: 'The Shipping Address 1 Cannot Be More Than Fifty Characters.',
        address2: 'The Shipping Address 2 Cannot Be More Than Fifty Characters.',
        city: 'TheCityCannotBeMoreThanFiftyCharactersOrItAllFallsApart',
      }
    }
    @credit_card.first_name = 'FullNameOnACardMustBeLessThanFiftyCharacters'
    @credit_card.last_name = 'LastName'

    response = @gateway.purchase(@amount, @credit_card, options)
    assert_success response
    assert_equal 'Transaction Approved Successful', response.message
  end

  def test_failed_purchase
    response = @gateway.purchase(@failed_amount, @credit_card, @options)
    assert_failure response
    assert_equal 'Invalid Payment TotalAmount', response.message
  end

  def test_successful_authorize_and_capture
    authorize = @gateway.authorize(@amount, @credit_card, @options)
    assert_success authorize
    assert_equal 'Transaction Approved Successful', authorize.message

    capture = @gateway.capture(nil, authorize.authorization)
    assert_success capture
  end

  def test_failed_authorize
    response = @gateway.authorize(@failed_amount, @credit_card, @options)
    assert_failure response
    assert_equal 'Error Failed', response.message
  end

  def test_failed_capture
    response = @gateway.capture(@amount, 'bogus')
    assert_failure response
    assert_equal 'Invalid Auth Transaction ID for Capture/Void', response.message
  end

  def test_successful_void
    authorize = @gateway.authorize(@amount, @credit_card, @options)
    assert_success authorize

    void = @gateway.void(authorize.authorization)
    assert_success void
  end

  def test_failed_void
    response = @gateway.void('bogus')
    assert_failure response
    assert_equal 'Invalid Auth Transaction ID for Capture/Void', response.message
  end

  def test_successful_refund
    response = @gateway.purchase(@amount, @credit_card, @options)
    assert_success response
    assert_equal 'Transaction Approved Successful', response.message

    response = @gateway.refund(@amount, response.authorization, @options)
    assert_success response
    assert_equal 'Transaction Approved Successful', response.message
  end

  def test_failed_refund
    response = @gateway.refund(@amount, 'fakeid', @options)
    assert_failure response
    assert_equal 'Invalid DirectRefundRequest, Transaction ID', response.message
  end

  def test_successful_store
    response = @gateway.store(@credit_card, @options)
    assert_success response
    assert_equal 'Transaction Approved Successful', response.message
  end

  def test_failed_store
    @options[:billing_address].merge!(country: nil)
    response = @gateway.store(@credit_card, @options)
    assert_failure response
    assert_equal 'V6044', response.params['Errors']
    assert_equal 'Customer CountryCode Required', response.message
  end

  def test_successful_update
    response = @gateway.store(@credit_card, @options)
    assert_success response
    assert_equal 'Transaction Approved Successful', response.message

    response = @gateway.update(response.authorization, @credit_card, @options)
    assert_success response
    assert_equal 'Transaction Approved Successful', response.message
  end

  def test_successful_store_purchase
    response = @gateway.store(@credit_card, @options)
    assert_success response
    assert_equal 'Transaction Approved Successful', response.message

    response = @gateway.purchase(@amount, response.authorization, transaction_type: 'MOTO')
    assert_success response
    assert_equal 'Transaction Approved Successful', response.message
  end

  def test_invalid_login
    gateway = EwayRapidGateway.new(
      login: 'bogus',
      password: 'bogus'
    )
    response = gateway.purchase(@amount, @credit_card, @options)
    assert_failure response
    assert_equal 'Unauthorized', response.message
  end

  def test_transcript_scrubbing
    # Bug fix: this test previously referenced @credit_card_success and
    # @params, neither of which is defined in setup — the nil ivars made both
    # the purchase and the scrubbing assertions raise NoMethodError. Use the
    # fixtures that setup actually provides.
    transcript = capture_transcript(@gateway) do
      @gateway.purchase(@amount, @credit_card, @options)
    end
    clean_transcript = @gateway.scrub(transcript)

    assert_scrubbed(@credit_card.number, clean_transcript)
    assert_scrubbed(@credit_card.verification_value.to_s, clean_transcript)
  end
end
| reinteractive/active_merchant | test/remote/gateways/remote_eway_rapid_test.rb | Ruby | mit | 6,573 |
class Solution:
    # @param s, a string
    # @param dict, a set of string
    # @return a boolean
    def wordBreak(self, s, dict):
        """Return True if s can be segmented into words from dict.

        Dynamic programming over prefixes: breakable[k] is True when s[:k]
        can be fully segmented into dictionary words. O(n^2) substring checks.
        """
        n = len(s)
        # breakable[0] is True: the empty prefix needs no words.
        breakable = [True] + [False] * n
        for end in range(1, n + 1):
            for start in range(end):
                if breakable[start] and s[start:end] in dict:
                    breakable[end] = True
                    break
        return breakable[n]
| happylixue/LeetCodeSol | problems/word-break/sol.py | Python | mit | 396 |
/* ==========================================================
* autocomplete.js
* Deal with the Typeahead.js/Bloodhound library to build the search field autocomplete
*
* Author: Yann, yann@antistatique.net
* Date: 2014-05-01 14:23:18
*
* Copyright 2014 Federal Chancellery of Switzerland
* Licensed under MIT
========================================================== */
(function($, data) {
  'use strict';

  var $searchFields = $('.form-search .search-field');

  // `data` is the optional global `searchData` (see the IIFE argument below);
  // without it the search field is rendered with no suggestions.
  if (data) {
    // Init the Bloodhound suggestion engine
    var bloodhound = new Bloodhound({
      datumTokenizer: Bloodhound.tokenizers.obj.whitespace('value'),
      queryTokenizer: Bloodhound.tokenizers.whitespace,
      // wrap each raw entry as {value: ...}, the shape typeahead displays
      local: $.map(data, function(state) { return { value: state }; })
    });
    bloodhound.initialize();

    // Init Typeahead on search-fields
    $searchFields.typeahead({
      hint: true,
      highlight: true,
      minLength: 1,
    },
    {
      name: 'search',
      displayKey: 'value',
      source: bloodhound.ttAdapter()
    });
  }

  // Insert the icons (clear button after the field, submit button at the end)
  $searchFields.after('<span class="icon icon--close" data-form-search-clear></span>');
  $('.form-search').append('<button class="icon icon--search icon--before"></button>');

  $('body').on('click', '[data-form-search-clear]', function () {
    $('#search-field').val('').focus(); // clear search field and refocus it
  });
}) (jQuery, (typeof searchData === 'undefined' ? false : searchData));
/* ==========================================================
* carousel.js
* Carousel helper
*
* Author: Yann, yann@antistatique.net
* Date: 2014-05-15 13:55:53
*
* Copyright 2014 Federal Chancellery of Switzerland
* Licensed under MIT
========================================================== */
(function($) {
  'use strict';

  // Re-run carousel layout on full load (images have sizes by then) and on resize.
  $(window).load(function () {
    carouselInit(jQuery);
  });

  $(window).resize(function () {
    carouselInit(jQuery);
  });

  // slideshow counter
  var slideshow_total = $('.carousel-slideshow .item').length;
  $('#carousel-total').text(slideshow_total);

  // Keep the "current / total" counter in sync after each slide transition.
  $('.carousel-slideshow').on('slid.bs.carousel', function () {
    var carouselData = $(this).data('bs.carousel');
    var currentIndex = carouselData.getItemIndex(carouselData.$element.find('.item.active'));
    var total = carouselData.$items.length;
    var text = (currentIndex + 1); // 1-based index for display

    $('#carousel-index').text(text);
    $('#carousel-total').text(total);
  });
}) (jQuery);
// Lay out every non-slideshow carousel: mark first/last items, disable edge
// controls, position indicators/controls under the active item's title+image,
// and equalise item heights so the carousel does not jump between slides.
function carouselInit($) {
  'use strict';
  var $carousel = $('.carousel:not(.carousel-slideshow)');

  $('.carousel .item:first-child').addClass('first');
  $('.carousel .item:last-child').addClass('last');

  $('.carousel').each(function() {
    disableControl($(this));
  });

  $('.carousel').on('slid.bs.carousel', function () {
    disableControl($(this));
  });

  // Idiom fix: a jQuery object is ALWAYS truthy, so the original
  // `if ($carousel)` guard never skipped this block. Test the match count
  // instead (behaviour is the same — .each() on an empty set was a no-op —
  // but the intent is now explicit).
  if ($carousel.length) {
    $carousel.each(function () {
      var biggestHeight = 0,
          titleHeight = $(this).find('.item.active h3:first-child').height(),
          imgHeight = $(this).find('.item.active .carousel-img').height();

      // Push indicators/controls below the active item's title and image.
      $(this).find('.carousel-indicators, .carousel-control').css('top', titleHeight + imgHeight + 50);

      // Give every item the height of the tallest one.
      $(this).find('.item').each(function () {
        if ($(this).height() >= biggestHeight) {
          biggestHeight = $(this).height();
        }
      });
      $(this).find('.item').height(biggestHeight);
    });
  }
}
// Disable the prev/next control of a carousel when the first/last item is
// active, keeping the aria-disabled attribute in sync with the class.
function disableControl(element) {
  'use strict';
  var setDisabled = function (control, disabled) {
    control
      .toggleClass('disabled', disabled)
      .attr('aria-disabled', disabled ? 'true' : 'false');
  };

  setDisabled(element.find('.left'), element.find('.first').hasClass('active'));
  setDisabled(element.find('.right'), element.find('.last').hasClass('active'));
}
/* ==========================================================
* collapse.js
* Add class when nav collapse is open
*
* Author: Yann, yann@antistatique.net
* Date: 2014-05-06
*
* Copyright 2014 Federal Chancellery of Switzerland
* Licensed under MIT
========================================================== */
(function($) {
  'use strict';

  // Normal Collapse: the trigger is the element immediately before the
  // collapsible; swap its arrow icon and ARIA state as it opens/closes.
  $('.collapse:not(tbody)').on('show.bs.collapse', function () {
    $(this)
      .prev()
      .addClass('active icon--root')
      .removeClass('icon--greater')
      .attr({
        'aria-selected': 'true',
        'aria-expanded': 'true'
      });
  });

  $('.collapse:not(tbody)').on('hide.bs.collapse', function () {
    $(this)
      .prev()
      .removeClass('active icon--root')
      .addClass('icon--greater')
      .attr( {
        'aria-selected': 'false',
        'aria-expanded': 'false'
      });
  });

  // Table Collapse: collapsible <tbody> rows — the toggle lives inside the
  // preceding row, so look it up via [data-toggle=collapse].
  $('tbody.collapse').on('show.bs.collapse', function () {
    $(this)
      .prev().find('[data-toggle=collapse]')
      .addClass('active')
      .attr({
        'aria-selected': 'true',
        'aria-expanded': 'true'
      });
  });

  $('tbody.collapse').on('hide.bs.collapse', function () {
    $(this)
      .prev().find('[data-toggle=collapse]')
      .removeClass('active')
      .attr({
        'aria-selected': 'false',
        'aria-expanded': 'false'
      });
  });
}) (jQuery);
/* ==========================================================
* drilldown.js
* Drilldown plugin scripts. For page-list-nav element
*
* Author: Toni Fisler, toni@antistatique.net
* Date: 2014-05-30 09:02:09
*
* Copyright 2014 Federal Chancellery of Switzerland
* Licensed under MIT
========================================================== */
(function($) {
  'use strict';

  // Initialise the drilldown navigation plugin on .drilldown elements.
  // The options object is passed inline; semantics are identical to the
  // previous named-variable form.
  $('.drilldown').drilldown({
    event: 'click',
    selector: 'a',
    speed: 100,
    cssClass: {
      container: 'drilldown-container',
      root: 'nav-page-list',
      sub: 'drilldown-sub',
      back: 'drilldown-back'
    }
  });
}) (jQuery);
/* ==========================================================
* global-nav.js
* Global Navigation syripts
*
* Author: Toni Fisler, toni@antistatique.net
* Date: 2014-05-27 16:36:15
*
* Copyright 2014 Federal Chancellery of Switzerland
* Licensed under MIT
========================================================== */
(function($) {
  'use strict';

  // Handle scroll to position nav as fixed
  var top = 36; // scroll offset (px) past which the mobile nav becomes sticky
  $(window).scroll(function () {
    var y = $(this).scrollTop();
    if (y >= top) {
      if (!$('.nav-mobile').hasClass('fixed')) {
        // A same-height spacer keeps the page from jumping when the nav
        // leaves the normal document flow.
        $('.nav-mobile').addClass('fixed')
          .after('<div class="nav-mobile-spacer" id="spacer" style="height:36px;"></div>');
      }
    }
    else {
      if ($('.nav-mobile').hasClass('fixed')) {
        $('.nav-mobile').removeClass('fixed');
        $('#spacer').remove();
      }
    }
  });
}) (jQuery);
// OUTLINE.JS
// https://github.com/lindsayevans/outline.js
//
// Based on http://www.paciellogroup.com/blog/2012/04/how-to-remove-css-outlines-in-an-accessible-manner/
//
// Hide outline on mouse interactions
// Show it on keyboard interactions
(function(doc){
  'use strict';

  var styleElement = doc.createElement('STYLE'),
      domEvents = 'addEventListener' in doc,
      addListener = function(type, callback){
        // Basic cross-browser event handling
        if (domEvents) {
          doc.addEventListener(type, callback);
        } else {
          doc.attachEvent('on' + type, callback);
        }
      },
      setCSS = function(cssText){
        // Old IE exposes styleSheet.cssText; everything else takes innerHTML.
        !!styleElement.styleSheet ? styleElement.styleSheet.cssText = cssText : styleElement.innerHTML = cssText;
      };

  doc.getElementsByTagName('HEAD')[0].appendChild(styleElement);

  // Using mousedown instead of mouseover, so that previously focused elements don't lose focus ring on mouse move
  addListener('mousedown', function(){
    setCSS(':focus{outline:0!important}::-moz-focus-inner{border:0!important}');
  });
  // Any keyboard interaction restores the default focus outline.
  addListener('keydown', function(){
    setCSS('');
  });
})(document);
/* ==========================================================
* print.js
* Add print preview windows
*
* Author: Yann, yann@antistatique.net
* Date: 2015-02-02
*
* Copyright 2014 Federal Chancellery of Switzerland
* Licensed under MIT
========================================================== */
(function($) {
  'use strict';

  // Initialization (jQuery plugin stub; the real API lives on $.printPreview)
  $.fn.printPreview = function() {
    return this;
  };

  $.printPreview = {
    // Build the print-preview view: strip navigation chrome from the page,
    // optionally isolate a single [data-print=...] element, inject the
    // print/close toolbar, and wire up the "links as footnotes" option.
    // NOTE: this mutates the live DOM destructively; printClose() below
    // restores the page by reloading it.
    printPreview: function(element) {
      var $body = $('body'),
          $container = $('.container-main'),
          footnoteLinks = "",
          linksIndex = 0;

      // Remove everything that must not appear on paper.
      $body.find('.nav-mobile, .drilldown, .nav-main, .header-separator, .nav-service, .nav-lang, .form-search, .yamm--select, header > div:first-child, footer, .alert, .icon--print, .social-sharing, form, .nav-process, .carousel-indicators, .carousel-control, .breadcrumb, .pagination-container').remove();

      // if an element is passed, we want it to be the only thing to print out
      if (element) {
        element = $('[data-print=' + element + ']').clone(); // clone to fix issue with IE render
        var header = $('header').clone(), // clone to fix issue with IE render
            title = element.attr('data-title') ? '<h1>' + element.attr('data-title') + '</h1>' : '';
        $container.addClass('print-element').html('').append(header, title, element);
      }

      $body.addClass('print-preview');

      // Toolbar: footnote-links checkbox on the left, print/close on the right.
      $container.prepend('<div class="row" id="print-settings">'+
          '<div class="col-sm-12">'+
            '<nav class="pagination-container clearfix">'+
              '<span class="pull-left">'+
                '<input type="checkbox" id="footnote-links"> '+
                '<label for="footnote-links">Links as footnotes</label>'+
              '</span>'+
              '<ul class="pull-right">'+
                '<li>'+
                  '<button id="print-button" title="print" class="btn"><span class="icon icon--print"></span></button>'+
                  ' '+
                  '<button id="close-button" title="close" class="btn btn-secondary"><span class="icon icon--close"></span></button>'+
                '</li>'+
              '</ul>'+
            '</nav>'+
          '</div>'+
        '</div>');

      $('#print-button').click(function () {
        $.printPreview.printProcess();
      });

      $('#close-button').click(function () {
        $.printPreview.printClose();
      });

      // Number every external (http...) link with a superscript reference
      // and collect the URLs for the optional footnote list.
      $('a').not('.access-keys a').each(function () {
        var target = $(this).attr('href');
        target = String(target);
        if (target !== "undefined" && target.indexOf("http") === 0) {
          linksIndex ++;
          footnoteLinks += '<li>'+target+'</li>';
          $('<sup class="link-ref">('+linksIndex+')</sup>').insertAfter(this);
        }
      });

      // Toggle the footnote list of collected external URLs.
      $('#footnote-links').change(function(){
        if (this.checked) {
          $container.append('<div id="footnote-links-wrapper" class="row footnote-links-wrapper">'+
            '<div class="col-sm-12">'+
              '<h3>Page Links</h3><hr>'+
              '<ol>'+
                footnoteLinks+
              '</ol>'+
            '</div>'+
          '</div>');
          $body.addClass('print-footnotes');
        } else {
          $('#footnote-links-wrapper').remove();
          $body.removeClass('print-footnotes');
        }
      });
    },

    printProcess: function() {
      window.print();
    },

    // Reload to undo the destructive DOM edits made by printPreview().
    printClose: function() {
      window.location.reload();
    }
  };
}) (jQuery);
/* ==========================================================
* rich-menu.js
* Add overlay when openning a rich yamm menu and define open/close events
*
* Author: Yann Gouffon, yann@antistatique.net
* Date: 2014-04-30 11:48:48
*
* Copyright 2014 Federal Chancellery of Switzerland
* Licensed under MIT
=========================================================== */
(function($) {
  'use strict';

  // Keep jQuery object in variables
  var $yamm = $('.yamm'),
      $yammClose = $('.yamm-close, .yamm-close-bottom'),
      $dropdown = $('.yamm .dropdown'),
      $dropdownToggle = $('.yamm .dropdown-toggle');

  // Toggle dropdown and fix z-index errors
  $yamm.each(function () {
    var $that = $(this);
    $that.on('click', '.dropdown-toggle', function () {
      if (!$(this).parent().hasClass('open')){
        // Size the mega-menu to the viewport minus the 49px fixed header.
        var dropdownHeight = $(window).height() - 49;
        $that.find('.drilldown-container').height( dropdownHeight );
      }
    });
  });

  $dropdownToggle.on('click', function() {
    $(this).parents($dropdown).trigger('get.hidden');
  });

  // Protocol: after "shown" the dropdown refuses to close (closable=false)
  // until its own toggle fires the custom get.hidden event. hide.bs.dropdown
  // below returns `closable`, which Bootstrap treats as the veto.
  $dropdown.on({
    "shown.bs.dropdown": function() { this.closable = false; },
    "get.hidden": function() { this.closable = true; }
  });

  $('.dropdown').on('show.bs.dropdown', function () {
    // Opening one menu closes any other and moves the nav-open marker.
    $dropdown.removeClass('open');
    $yamm.removeClass('nav-open');
    $(this).parents($yamm).addClass('nav-open');
  });

  $dropdown.on('hide.bs.dropdown', function () {
    // only remove the nav-open class if effectively closing dropdown
    if (this.closable) {
      $yamm.removeClass('nav-open');
    }
    return this.closable;
  });

  $(document).on('click', function(e) {
    // hide dropdown if dropdown is open and target is not in dropdown
    if ($('.dropdown.open').length > 0 && $(e.target).parents('.dropdown').length === 0) {
      $('.dropdown.open .dropdown-toggle').trigger('click');
    }
  });

  // Trigger close yamm menu (explicit close buttons inside the dropdown)
  $dropdown.each(function () {
    var $that = $(this);
    $that.find($yammClose).click( function (e) {
      e.preventDefault();
      $that.find($dropdownToggle).trigger("click");
    });
  });
}) (jQuery);
/* ==========================================================
* select.js
* Scripts handling `select` elements
*
* Author: Toni Fisler, toni@antistatique.net
* Date: 2014-04-30 10:20:33
*
* Copyright 2014 Federal Chancellery of Switzerland
* Licensed under MIT
========================================================== */
(function($) {
  'use strict';

  // Enhance every <select> with the Chosen plugin once the DOM is ready;
  // lists of fewer than 10 entries get no search box.
  $(document).ready(function() {
    $('select').chosen({ disable_search_threshold: 10 });
  });
}) (jQuery);
/* ==========================================================
* shame.js
* DOM rewritting on mobile, issue #160
*
* Author: Yann, yann@antistatique.net
* Date: 2014-06-18 15:57:23
*
* Copyright 2014 Federal Chancellery of Switzerland
* Licensed under MIT
========================================================== */
(function($) {
  'use strict';

  $(document).ready(function () {
    var id;                       // debounce timer handle
    var isCarouselified = false;  // guards against double DOM rewrites
    var isCollapsified = false;

    carouselify();
    collapsify();

    // Debounce resize so the rewrites run at most once per 500ms.
    $(window).resize(function() {
      clearTimeout(id);
      id = setTimeout(resizeLauncher, 500);
    });

    function resizeLauncher() {
      carouselify();
      collapsify();
    }

    // Mobile (<=767px): rewrite .tab-focus tab panels into Bootstrap
    // carousels; above 767px: undo the rewrite.
    function carouselify() {
      var $tabFocus = $('.tab-focus'),
          focusIndex = 0;

      // NOTE(review): `$tabFocus && …` is always truthy (jQuery objects are
      // never falsy), so only the width check and the state flag gate this
      // branch. Harmless because .each() on an empty set is a no-op, but the
      // intent was presumably `$tabFocus.length` — confirm.
      if($tabFocus && $(window).width() <= 767 && !isCarouselified ) {
        isCarouselified = true;
        $tabFocus.each(function () {
          var $that = $(this),
              itemIndex = -1;
          focusIndex += 1;
          $that.attr('id', 'tab-focus-'+focusIndex);
          $that.next('.nav-tabs').hide();

          // Prevent those mobile-only carousels from riding automatically by setting interval to 0
          $that.addClass('carousel slide').removeClass('tab-content tab-border').attr('data-interval', 0);
          $that.wrapInner('<div class="carousel-inner"></div>');
          $that.prepend('<ol class="carousel-indicators"></ol>');
          $that.find('.tab-pane').each(function () {
            itemIndex += 1;
            $(this).removeClass('tab-pane in active').addClass('item');
            $that.find('.carousel-indicators').append('<li data-target="#tab-focus-' + focusIndex + '" data-slide-to="' + itemIndex + '"></li>');
          });
          $that.find('.item:first').addClass('active');
          $that.find('.carousel-indicators li:first-child').addClass('active');
          $that.append('<a class="left carousel-control icon icon--before icon--less" href="#tab-focus-' + focusIndex + '" data-slide="prev"></a><a class="right carousel-control icon icon--before icon--greater" href="#tab-focus-' + focusIndex + '" data-slide="next"></a>');
        });
      }
      else if($tabFocus && $(window).width() > 767 && isCarouselified) {
        isCarouselified = false;
        $tabFocus.each(function () {
          var $that = $(this);
          focusIndex -= 1;
          $that.attr('id', '');
          $that.next('.nav-tabs-focus').css('display', 'flex'); // we can't use .show() because it should be a flex wrapper
          $that.removeClass('carousel slide').addClass('tab-content tab-border');
          $that.find('ol.carousel-indicators').remove();
          $that.find('.item').each(function () {
            $(this).addClass('tab-pane').removeClass('item');
            $(this).css('height', 'auto');
          });
          $that.find('.tab-pane:first-child').addClass('active in');
          if ( $that.find('.tab-pane').parent().hasClass('carousel-inner') ) {
            $that.find('.tab-pane').unwrap();
          }
          $that.find('.carousel-control').remove();
        });
      }
    }

    // Mobile (<=767px): turn regular .nav-tabs into stacked collapsibles
    // (each tab link becomes a collapse toggle over a copy of its panel);
    // above 767px: restore the tab structure.
    function collapsify() {
      var $navTab = $('.nav-tabs:not(.focus)'),
          $collapsify = $('.collapsify'),
          linkIndex = 0;

      // NOTE(review): same always-truthy `$navTab &&` guard as above.
      if($navTab && $(window).width() <= 767 && !isCollapsified ) {
        isCollapsified = true;
        $navTab.not('.tab-focus').each(function (){
          var $that = $(this);
          $that.removeClass('nav-tabs').addClass('collapsify');
          $that.next('.tab-content').hide();
          $that.find('a').each(function (){
            var $target = $(this).attr('href');
            linkIndex += 1;
            $(this).unwrap();
            // Copy the panel's HTML directly under its (former) tab link.
            $('<div class="collapse" id="collapse-' + linkIndex + '">' + $($target).html() + '</div>').insertAfter(this);
            $(this).attr('data-toggle', 'collapse');
            $(this).attr('data-target', '#collapse-' + linkIndex);
            $(this).addClass('collapse-closed');
            $(this).click(function(){
              $(this).toggleClass('collapse-closed');
            });
          });
          //$that.find('a:first-child').removeClass('collapse-closed').next('.collapse').addClass('in');
        });
      }
      else if($collapsify && $(window).width() > 767 && isCollapsified) {
        isCollapsified = false;
        $collapsify.each(function (){
          var $that = $(this);
          $that.addClass('nav-tabs').removeClass('collapsify');
          $that.next('.tab-content').show();
          $that.find('a').each(function (){
            linkIndex -= 1;
            $(this).wrap('<li></li>');
            $(this).parent().next('.collapse').remove();
            $(this).attr('data-toggle', 'tab');
            $(this).attr('data-target', '');
            $(this).removeClass('collapse-closed');
          });
          // Re-mark the <li> whose panel is still the active one.
          $that.find('li a').each(function () {
            var $tabTarget = $(this).attr('href');
            if($($tabTarget).hasClass('active')){
              $(this).parent().addClass('active');
            }
          });
        });
      }
    }
  });
}) (jQuery);
/* ==========================================================
* subnavigation.js
* Sub-navigation scripts, handles mainly how the nav-page-list behaves on small
* screens
*
* Author: Toni Fisler, toni@antistatique.net
* Date: 2014-09-24 10:18:19
*
* Copyright 2014 Federal Chancellery of Switzerland
* Licensed under MIT
========================================================== */
(function($) {
  'use strict';

  // Wire up the sub-navigation collapse behaviour now and on every resize.
  subNavInit(jQuery);

  $(window).resize(function () {
    subNavInit(jQuery);
  });

  // Bug fix: attribute values in jQuery/Sizzle selectors must be quoted —
  // `a[href=#collapseSubNav]` throws "unrecognized expression" on jQuery
  // 1.12+/2.2+/3.x because of the unquoted `#`.
  $('a[href="#collapseSubNav"]').on('click', function() {
    // Keep aria-expanded in sync with the toggle state.
    $(this).attr('aria-expanded', ($(this).attr('aria-expanded') === 'true' ? 'false' : 'true') );
  });
}) (jQuery);
// Below the 767px breakpoint, turn the sub-navigation drilldown into a
// Bootstrap collapse (#collapseSubNav); above it, undo the wiring and
// restore automatic height. Idempotent via the collapse-enabled marker.
function subNavInit($) {
  'use strict';
  var $drilldown = $('.drilldown[class*=col-]'),
      isMobile = $(window).width() <= 767,
      isCollapsed = $drilldown.hasClass('collapse-enabled');

  if (isMobile && !isCollapsed) {
    $drilldown.addClass('collapse-enabled');
    $drilldown
      .find('.drilldown-container')
      .addClass('collapse')
      .attr('id', 'collapseSubNav');
  } else if (!isMobile && isCollapsed) {
    $drilldown.removeClass('collapse-enabled');
    $drilldown
      .find('.drilldown-container')
      .removeClass('collapse in')
      .attr('id', '')
      .css({ 'height': 'auto' });
  }
}
/* ==========================================================
* tablesorter.js
* Control tablesort from markup
*
* Author: Simon Perdrisat, simon@antistatique.net
* Date: 2014-05-01 11:11:33
*
* Copyright 2014 Federal Chancellery of Switzerland
* Licensed under MIT
========================================================== */
(function($) {
  'use strict';
  // Activate the tablesorter plugin on every table opted in via .table-sort.
  var $sortableTables = $('.table-sort');
  $sortableTables.tablesorter();
}) (jQuery);
/* ==========================================================
* tabs.js
* JS for the tabs and tab-focus elements
*
* Copyright 2014 Federal Chancellery of Switzerland
* Licensed under MIT
========================================================== */
(function($) {
  'use strict';

  /**
   * Auto-rotating "focus" tabs: advances through the .nav-tabs-focus tabs
   * every `delay` ms, pausing on hover/focus and toggled by the
   * .tab-focus-control play/pause button.
   * @constructor
   * @param {Object} domNode
   */
  function TabFocus(element) {
    this.$wrapper = $(element).parent();
    this.domNodes = '.tab-focus, .nav-tabs-focus';
    this.delay = 3000;      // ms between automatic tab changes
    this.playing = null;    // user-facing play state (drives the control icon)
    this.interval = null;   // setInterval handle for the rotation

    this.$wrapper
      .on('click', '.nav-tabs-focus', function() {
        // Manually choosing a tab stops the rotation for good.
        this.pause(null, true);
      }.bind(this))
      .on('click', '.tab-focus-control', function() {
        if (this.playing) {
          this.pause(null, true);
        } else {
          this.play(null, true);
        }
      }.bind(this));

    this.play(null, true);
  }

  TabFocus.prototype = {
    // Hover/focus listeners pause rotation temporarily without flipping
    // the play state or the control icon.
    addListeners: function() {
      this.$wrapper
        .on('mouseenter.tabfocus focus.tabfocus', this.domNodes, this.pause.bind(this))
        .on('mouseleave.tabfocus blur.tabfocus', this.domNodes, this.play.bind(this));
    },

    removeListeners: function() {
      this.$wrapper
        .off('mouseenter.tabfocus focus.tabfocus', this.domNodes)
        .off('mouseleave.tabfocus blur.tabfocus', this.domNodes);
    },

    // startListening=true means a real state change (vs. a hover resume):
    // also update listeners and the play/pause icon.
    play: function(event, startListening) {
      if (this.interval) {
        clearInterval(this.interval);
      }
      this.interval = setInterval(this.slide.bind(this), this.delay);

      if (startListening) {
        this.playing = true;
        this.addListeners();
        this.$wrapper.find('.tab-focus-control .icon').removeClass('icon--play').addClass('icon--pause');
      }
    },

    pause: function(event, stopListening) {
      clearInterval(this.interval);

      if (stopListening) {
        this.playing = false;
        this.removeListeners();
        this.$wrapper.find('.tab-focus-control .icon').removeClass('icon--pause').addClass('icon--play');
      }
    },

    // Advance to the next tab (wrapping to the first at the end).
    slide: function() {
      var $nav = this.$wrapper.find('.nav-tabs-focus');

      // If the nav is hidden, it means the focus has been changed for a carousel (mobile)
      // We don’t want to slide automatically anymore
      if ($nav.is(':hidden')) {
        return this.pause(null, true);
      }

      if ($nav.find('> li').length) {
        var tabs = this.$wrapper.find('.nav-tabs-focus > li'),
            activeTab = tabs.filter('.active'),
            nextTab = activeTab.next('li'),
            newTab = nextTab.length ? nextTab.find('a') : tabs.eq(0).find('a');
        newTab.tab('show');
      }
    }
  };

  // jQuery plugin wrapper; $.data guards against double initialisation.
  $.fn.tabFocus = function() {
    return this.each(function() {
      if (!$.data(this, 'TabFocus')) {
        $.data(this, 'TabFocus', new TabFocus(this));
      }
    });
  };

  $('.tab-focus').tabFocus();
})(jQuery);
/* ==========================================================
* treecrumb.js
* Change icon class to change the caret direction
*
* Author: Yann Gouffon, yann@antistatique.net
* Date: 2014-05-01 11:11:33
*
* Copyright 2014 Federal Chancellery of Switzerland
* Licensed under MIT
========================================================== */
(function($) {
    'use strict';
    $('.treecrumb').each(function() {
        var $tree = $(this);
        // Point every caret in this treecrumb to the right (closed state).
        function resetCarets() {
            $tree.find('.dropdown-toggle span')
                .removeClass('icon--bottom')
                .addClass('icon--right');
        }
        // Dropdown closed: all carets back to "right".
        $tree.on('hide.bs.dropdown', resetCarets);
        // Dropdown opened: reset all, then flip the opened toggle's caret down.
        $tree.on('show.bs.dropdown', function(e) {
            resetCarets();
            $(e.relatedTarget).find('span')
                .removeClass('icon--right')
                .addClass('icon--bottom');
        });
    });
})(jQuery);
| eonum/drg-search | vendor/assets/javascripts/styleguide.js | JavaScript | mit | 24,803 |
// AOT (ahead-of-time) bootstrap entry point for the textarea
// "multiple values" demo: loads the pre-compiled NgModule factory
// and starts the application in the browser.
import { platformBrowser } from '@angular/platform-browser';
import { enableProdMode } from '@angular/core';
import { AppModuleNgFactory } from '../../../temp/app/textarea/multiplevalues/app.module.ngfactory';
// Disable Angular's development-mode assertions and extra change-detection checks.
enableProdMode();
platformBrowser().bootstrapModuleFactory(AppModuleNgFactory);
<?php
/**
* @author Alex Bilbie <hello@alexbilbie.com>
* @copyright Copyright (c) Alex Bilbie
* @license http://mit-license.org/
*
* @link https://github.com/thephpleague/oauth2-server
*/
namespace League\OAuth2\Server\Entities\Traits;
trait AuthCodeTrait
{
    /**
     * URI the client should be redirected to after the authorization step.
     *
     * @var null|string
     */
    protected $redirectUri;
    /**
     * Get the redirect URI registered on this auth code, if any.
     *
     * @return string|null
     */
    public function getRedirectUri()
    {
        return $this->redirectUri;
    }
    /**
     * Set the redirect URI for this auth code.
     *
     * @param string $uri
     */
    public function setRedirectUri($uri)
    {
        $this->redirectUri = $uri;
    }
}
| nusendra/nusendra-blog | vendor/league/oauth2-server/src/Entities/Traits/AuthCodeTrait.php | PHP | mit | 613 |
// ********************************************************************************************************
// Product Name: DotSpatial.Forms.LayoutForm
// Description: A form that shows the mapwindow layout
// ********************************************************************************************************
//
// The Original Code is from MapWindow.dll version 6.0
//
// The Initial Developer of this Original Code is by Brian Marchionni Aug 2009
//
// Contributor(s): (Open source contributors should list themselves and their modifications here).
// ------------------|------------|---------------------------------------------------------------
// Ted Dunsford | 8/28/2009 | Cleaned up some code formatting using resharper
// ********************************************************************************************************
using System;
using System.Drawing;
using System.IO;
using System.Windows.Forms;
namespace DotSpatial.Controls
{
    /// <summary>
    /// This is the primary form where the print layout content is organized before printing
    /// </summary>
    public partial class LayoutForm : Form
    {
        /// <summary>
        /// Default constructor for creating a new instance of the Layout form
        /// </summary>
        public LayoutForm()
        {
            InitializeComponent();
            if (Mono.Mono.IsRunningOnMono())
            {
                // On Mac and possibly other Mono platforms, GdipCreateLineBrushFromRect
                // in gdiplus native lib returns InvalidParameter in Mono file LinearGradientBrush.cs
                // if a StripPanel's Width or Height is 0, so force them to non-0.
                _toolStripContainer1.TopToolStripPanel.Size = new Size(_toolStripContainer1.TopToolStripPanel.Size.Width, 1);
                _toolStripContainer1.BottomToolStripPanel.Size = new Size(_toolStripContainer1.BottomToolStripPanel.Size.Width, 1);
                _toolStripContainer1.LeftToolStripPanel.Size = new Size(1, _toolStripContainer1.LeftToolStripPanel.Size.Height);
                _toolStripContainer1.RightToolStripPanel.Size = new Size(1, _toolStripContainer1.RightToolStripPanel.Size.Height);
            }
        }
        /// <summary>
        /// Gets or sets the map that will be used in the layout
        /// </summary>
        public Map MapControl
        {
            get { return _layoutControl1.MapControl; }
            set { _layoutControl1.MapControl = value; }
        }
        /// <summary>
        /// Gets the layout control hosted by this form.
        /// </summary>
        public LayoutControl LayoutControl
        {
            get { return _layoutControl1; }
        }
        // Wired up by the designer: closes the form from the layout menu strip.
        private void layoutMenuStrip1_CloseClicked(object sender, EventArgs e)
        {
            this.Close();
        }
        // Keeps the window title in sync with the layout's current file name.
        private void layoutControl1_FilenameChanged(object sender, EventArgs e)
        {
            Text = !string.IsNullOrEmpty(_layoutControl1.Filename)
                ? "DotSpatial Print Layout - " + Path.GetFileName(this._layoutControl1.Filename)
                : "DotSpatial Print Layout";
        }
        // On load, seeds the layout with a map element sized to the layout
        // control, when a map control has been supplied before showing the form.
        private void LayoutForm_Load(object sender, EventArgs e)
        {
            if (MapControl != null)
            {
                var mapElement = _layoutControl1.CreateMapElement();
                mapElement.Size = _layoutControl1.Size;
                _layoutControl1.AddToLayout(mapElement);
            }
        }
    }
} | swsglobal/DotSpatial | Source/DotSpatial.Controls/LayoutForm.cs | C# | mit | 3,537 |
# mako/codegen.py
# Copyright 2006-2019 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""provides functionality for rendering a parsetree constructing into module
source code."""
import json
import re
import time
from mako import ast
from mako import compat
from mako import exceptions
from mako import filters
from mako import parsetree
from mako import util
from mako.pygen import PythonPrinter
# Format version embedded into generated modules as ``_magic_number``;
# presumably used by the loader to detect stale compiled templates —
# TODO confirm against mako.template.
MAGIC_NUMBER = 10
# names which are hardwired into the
# template and are not accessed via the
# context itself
TOPLEVEL_DECLARED = set(["UNDEFINED", "STOP_RENDERING"])
# identifiers a template may never declare itself (enforced via
# the compiler's reserved_names); includes the toplevel names above
RESERVED_NAMES = set(["context", "loop"]).union(TOPLEVEL_DECLARED)
def compile( # noqa
    node,
    uri,
    filename=None,
    default_filters=None,
    buffer_filters=None,
    imports=None,
    future_imports=None,
    source_encoding=None,
    generate_magic_comment=True,
    disable_unicode=False,
    strict_undefined=False,
    enable_loop=True,
    reserved_names=frozenset(),
):
    """Generate module source code given a parsetree node,
    uri, and optional source filename.

    Returns the generated Python module source as a string.  The
    keyword arguments mirror the corresponding Template options and
    are bundled into a _CompileContext that the code generator
    consumes.
    """
    # if on Py2K, push the "source_encoding" string to be
    # a bytestring itself, as we will be embedding it into
    # the generated source and we don't want to coerce the
    # result into a unicode object, in "disable_unicode" mode
    if not compat.py3k and isinstance(source_encoding, compat.text_type):
        source_encoding = source_encoding.encode(source_encoding)
    buf = util.FastEncodingBuffer()
    printer = PythonPrinter(buf)
    # _GenerateRenderMethod writes the whole module into ``printer``
    # as a side effect of construction.
    _GenerateRenderMethod(
        printer,
        _CompileContext(
            uri,
            filename,
            default_filters,
            buffer_filters,
            imports,
            future_imports,
            source_encoding,
            generate_magic_comment,
            disable_unicode,
            strict_undefined,
            enable_loop,
            reserved_names,
        ),
        node,
    )
    return buf.getvalue()
class _CompileContext(object):
    """Plain value object carrying the per-compilation options from
    compile() down through the code generator."""
    def __init__(
        self,
        uri,
        filename,
        default_filters,
        buffer_filters,
        imports,
        future_imports,
        source_encoding,
        generate_magic_comment,
        disable_unicode,
        strict_undefined,
        enable_loop,
        reserved_names,
    ):
        self.uri = uri
        self.filename = filename
        self.default_filters = default_filters
        self.buffer_filters = buffer_filters
        self.imports = imports
        self.future_imports = future_imports
        self.source_encoding = source_encoding
        self.generate_magic_comment = generate_magic_comment
        self.disable_unicode = disable_unicode
        self.strict_undefined = strict_undefined
        self.enable_loop = enable_loop
        self.reserved_names = reserved_names
class _GenerateRenderMethod(object):
"""A template visitor object which generates the
full module source for a template.
"""
def __init__(self, printer, compiler, node):
self.printer = printer
self.compiler = compiler
self.node = node
self.identifier_stack = [None]
self.in_def = isinstance(node, (parsetree.DefTag, parsetree.BlockTag))
if self.in_def:
name = "render_%s" % node.funcname
args = node.get_argument_expressions()
filtered = len(node.filter_args.args) > 0
buffered = eval(node.attributes.get("buffered", "False"))
cached = eval(node.attributes.get("cached", "False"))
defs = None
pagetag = None
if node.is_block and not node.is_anonymous:
args += ["**pageargs"]
else:
defs = self.write_toplevel()
pagetag = self.compiler.pagetag
name = "render_body"
if pagetag is not None:
args = pagetag.body_decl.get_argument_expressions()
if not pagetag.body_decl.kwargs:
args += ["**pageargs"]
cached = eval(pagetag.attributes.get("cached", "False"))
self.compiler.enable_loop = self.compiler.enable_loop or eval(
pagetag.attributes.get("enable_loop", "False")
)
else:
args = ["**pageargs"]
cached = False
buffered = filtered = False
if args is None:
args = ["context"]
else:
args = [a for a in ["context"] + args]
self.write_render_callable(
pagetag or node, name, args, buffered, filtered, cached
)
if defs is not None:
for node in defs:
_GenerateRenderMethod(printer, compiler, node)
if not self.in_def:
self.write_metadata_struct()
    def write_metadata_struct(self):
        """Append the source-map metadata block to the generated module.

        The metadata is written as JSON inside a module-level string
        literal, bracketed by __M_BEGIN_METADATA / __M_END_METADATA
        markers so it can be located later without importing the module.
        """
        # extend the line map so the final generated line is covered
        self.printer.source_map[self.printer.lineno] = max(
            self.printer.source_map
        )
        struct = {
            "filename": self.compiler.filename,
            "uri": self.compiler.uri,
            "source_encoding": self.compiler.source_encoding,
            "line_map": self.printer.source_map,
        }
        self.printer.writelines(
            '"""',
            "__M_BEGIN_METADATA",
            json.dumps(struct),
            "__M_END_METADATA\n" '"""',
        )
    @property
    def identifiers(self):
        # identifier scope currently being generated (top of the stack)
        return self.identifier_stack[-1]
    def write_toplevel(self):
        """Traverse a template structure for module-level directives and
        generate the start of module-level code.

        Returns the list of top-level def nodes, which the caller then
        renders as their own module-level functions.
        """
        inherit = []
        namespaces = {}
        module_code = []
        self.compiler.pagetag = None
        # one-shot visitor collecting <%inherit>, <%namespace>, <%page>
        # and module-level <%! %> code blocks from the template root
        class FindTopLevel(object):
            def visitInheritTag(s, node):
                inherit.append(node)
            def visitNamespaceTag(s, node):
                namespaces[node.name] = node
            def visitPageTag(s, node):
                self.compiler.pagetag = node
            def visitCode(s, node):
                if node.ismodule:
                    module_code.append(node)
        f = FindTopLevel()
        for n in self.node.nodes:
            n.accept_visitor(f)
        self.compiler.namespaces = namespaces
        module_ident = set()
        for n in module_code:
            module_ident = module_ident.union(n.declared_identifiers())
        module_identifiers = _Identifiers(self.compiler)
        module_identifiers.declared = module_ident
        # module-level names, python code
        if (
            self.compiler.generate_magic_comment
            and self.compiler.source_encoding
        ):
            self.printer.writeline(
                "# -*- coding:%s -*-" % self.compiler.source_encoding
            )
        if self.compiler.future_imports:
            self.printer.writeline(
                "from __future__ import %s"
                % (", ".join(self.compiler.future_imports),)
            )
        # fixed module prologue shared by every generated template module
        self.printer.writeline("from mako import runtime, filters, cache")
        self.printer.writeline("UNDEFINED = runtime.UNDEFINED")
        self.printer.writeline("STOP_RENDERING = runtime.STOP_RENDERING")
        self.printer.writeline("__M_dict_builtin = dict")
        self.printer.writeline("__M_locals_builtin = locals")
        self.printer.writeline("_magic_number = %r" % MAGIC_NUMBER)
        self.printer.writeline("_modified_time = %r" % time.time())
        self.printer.writeline("_enable_loop = %r" % self.compiler.enable_loop)
        self.printer.writeline(
            "_template_filename = %r" % self.compiler.filename
        )
        self.printer.writeline("_template_uri = %r" % self.compiler.uri)
        self.printer.writeline(
            "_source_encoding = %r" % self.compiler.source_encoding
        )
        if self.compiler.imports:
            buf = ""
            for imp in self.compiler.imports:
                buf += imp + "\n"
                self.printer.writeline(imp)
            # parse the emitted imports so their declared names can be
            # treated as module-level identifiers below
            impcode = ast.PythonCode(
                buf,
                source="",
                lineno=0,
                pos=0,
                filename="template defined imports",
            )
        else:
            impcode = None
        main_identifiers = module_identifiers.branch(self.node)
        mit = module_identifiers.topleveldefs
        module_identifiers.topleveldefs = mit.union(
            main_identifiers.topleveldefs
        )
        module_identifiers.declared.update(TOPLEVEL_DECLARED)
        if impcode:
            module_identifiers.declared.update(impcode.declared_identifiers)
        self.compiler.identifiers = module_identifiers
        self.printer.writeline(
            "_exports = %r"
            % [n.name for n in main_identifiers.topleveldefs.values()]
        )
        self.printer.write_blanks(2)
        if len(module_code):
            self.write_module_code(module_code)
        if len(inherit):
            self.write_namespaces(namespaces)
            # only the last <%inherit> encountered takes effect
            self.write_inherit(inherit[-1])
        elif len(namespaces):
            self.write_namespaces(namespaces)
        return list(main_identifiers.topleveldefs.values())
    def write_render_callable(
        self, node, name, args, buffered, filtered, cached
    ):
        """write a top-level render callable.
        this could be the main render() method or that of a top-level def."""
        if self.in_def:
            decorator = node.decorator
            if decorator:
                self.printer.writeline(
                    "@runtime._decorate_toplevel(%s)" % decorator
                )
        self.printer.start_source(node.lineno)
        self.printer.writelines(
            "def %s(%s):" % (name, ",".join(args)),
            # push new frame, assign current frame to __M_caller
            "__M_caller = context.caller_stack._push_frame()",
            "try:",
        )
        if buffered or filtered or cached:
            self.printer.writeline("context._push_buffer()")
        self.identifier_stack.append(
            self.compiler.identifiers.branch(self.node)
        )
        if (not self.in_def or self.node.is_block) and "**pageargs" in args:
            self.identifier_stack[-1].argument_declared.add("pageargs")
        if not self.in_def and (
            len(self.identifiers.locally_assigned) > 0
            or len(self.identifiers.argument_declared) > 0
        ):
            # seed __M_locals with the declared arguments so same-template
            # def calls can see them (simulated enclosing scope)
            self.printer.writeline(
                "__M_locals = __M_dict_builtin(%s)"
                % ",".join(
                    [
                        "%s=%s" % (x, x)
                        for x in self.identifiers.argument_declared
                    ]
                )
            )
        self.write_variable_declares(self.identifiers, toplevel=True)
        for n in self.node.nodes:
            n.accept_visitor(self)
        self.write_def_finish(self.node, buffered, filtered, cached)
        self.printer.writeline(None)
        self.printer.write_blanks(2)
        if cached:
            self.write_cache_decorator(
                node, name, args, buffered, self.identifiers, toplevel=True
            )
def write_module_code(self, module_code):
"""write module-level template code, i.e. that which
is enclosed in <%! %> tags in the template."""
for n in module_code:
self.printer.write_indented_block(n.text, starting_lineno=n.lineno)
    def write_inherit(self, node):
        """write the module-level inheritance-determination callable."""
        # _mako_inherit() is invoked by the runtime when the module is
        # first rendered; it resolves the parent template expression.
        self.printer.writelines(
            "def _mako_inherit(template, context):",
            "_mako_generate_namespaces(context)",
            "return runtime._inherit_from(context, %s, _template_uri)"
            % (node.parsed_attributes["file"]),
            None,
        )
    def write_namespaces(self, namespaces):
        """write the module-level namespace-generating callable."""
        # lazy accessor: builds all namespaces on first lookup miss
        self.printer.writelines(
            "def _mako_get_namespace(context, name):",
            "try:",
            "return context.namespaces[(__name__, name)]",
            "except KeyError:",
            "_mako_generate_namespaces(context)",
            "return context.namespaces[(__name__, name)]",
            None,
            None,
        )
        self.printer.writeline("def _mako_generate_namespaces(context):")
        for node in namespaces.values():
            if "import" in node.attributes:
                self.compiler.has_ns_imports = True
            self.printer.start_source(node.lineno)
            if len(node.nodes):
                # the <%namespace> tag has inline defs; emit them inside a
                # local make_namespace() that returns the exported callables
                self.printer.writeline("def make_namespace():")
                export = []
                identifiers = self.compiler.identifiers.branch(node)
                self.in_def = True
                class NSDefVisitor(object):
                    def visitDefTag(s, node):
                        s.visitDefOrBase(node)
                    def visitBlockTag(s, node):
                        s.visitDefOrBase(node)
                    def visitDefOrBase(s, node):
                        if node.is_anonymous:
                            raise exceptions.CompileException(
                                "Can't put anonymous blocks inside "
                                "<%namespace>",
                                **node.exception_kwargs
                            )
                        self.write_inline_def(node, identifiers, nested=False)
                        export.append(node.funcname)
                vis = NSDefVisitor()
                for n in node.nodes:
                    n.accept_visitor(vis)
                self.printer.writeline("return [%s]" % (",".join(export)))
                self.printer.writeline(None)
                self.in_def = False
                callable_name = "make_namespace()"
            else:
                callable_name = "None"
            # choose the namespace flavor based on the tag's attributes:
            # file= -> TemplateNamespace, module= -> ModuleNamespace,
            # otherwise a plain Namespace
            if "file" in node.parsed_attributes:
                self.printer.writeline(
                    "ns = runtime.TemplateNamespace(%r,"
                    " context._clean_inheritance_tokens(),"
                    " templateuri=%s, callables=%s, "
                    " calling_uri=_template_uri)"
                    % (
                        node.name,
                        node.parsed_attributes.get("file", "None"),
                        callable_name,
                    )
                )
            elif "module" in node.parsed_attributes:
                self.printer.writeline(
                    "ns = runtime.ModuleNamespace(%r,"
                    " context._clean_inheritance_tokens(),"
                    " callables=%s, calling_uri=_template_uri,"
                    " module=%s)"
                    % (
                        node.name,
                        callable_name,
                        node.parsed_attributes.get("module", "None"),
                    )
                )
            else:
                self.printer.writeline(
                    "ns = runtime.Namespace(%r,"
                    " context._clean_inheritance_tokens(),"
                    " callables=%s, calling_uri=_template_uri)"
                    % (node.name, callable_name)
                )
            if eval(node.attributes.get("inheritable", "False")):
                self.printer.writeline("context['self'].%s = ns" % (node.name))
            self.printer.writeline(
                "context.namespaces[(__name__, %s)] = ns" % repr(node.name)
            )
            self.printer.write_blanks(1)
        if not len(namespaces):
            self.printer.writeline("pass")
        self.printer.writeline(None)
    def write_variable_declares(self, identifiers, toplevel=False, limit=None):
        """write variable declarations at the top of a function.
        the variable declarations are in the form of callable
        definitions for defs and/or name lookup within the
        function's context argument. the names declared are based
        on the names that are referenced in the function body,
        which don't otherwise have any explicit assignment
        operation. names that are assigned within the body are
        assumed to be locally-scoped variables and are not
        separately declared.
        for def callable definitions, if the def is a top-level
        callable then a 'stub' callable is generated which wraps
        the current Context into a closure. if the def is not
        top-level, it is fully rendered as a local closure.
        """
        # collection of all defs available to us in this scope
        comp_idents = dict([(c.funcname, c) for c in identifiers.defs])
        to_write = set()
        # write "context.get()" for all variables we are going to
        # need that arent in the namespace yet
        to_write = to_write.union(identifiers.undeclared)
        # write closure functions for closures that we define
        # right here
        to_write = to_write.union(
            [c.funcname for c in identifiers.closuredefs.values()]
        )
        # remove identifiers that are declared in the argument
        # signature of the callable
        to_write = to_write.difference(identifiers.argument_declared)
        # remove identifiers that we are going to assign to.
        # in this way we mimic Python's behavior,
        # i.e. assignment to a variable within a block
        # means that variable is now a "locally declared" var,
        # which cannot be referenced beforehand.
        to_write = to_write.difference(identifiers.locally_declared)
        # "loop" is declared implicitly by the runtime when loop support
        # is enabled, so it is never looked up from the context
        if self.compiler.enable_loop:
            has_loop = "loop" in to_write
            to_write.discard("loop")
        else:
            has_loop = False
        # if a limiting set was sent, constraint to those items in that list
        # (this is used for the caching decorator)
        if limit is not None:
            to_write = to_write.intersection(limit)
        if toplevel and getattr(self.compiler, "has_ns_imports", False):
            self.printer.writeline("_import_ns = {}")
            self.compiler.has_imports = True
            for ident, ns in self.compiler.namespaces.items():
                if "import" in ns.attributes:
                    self.printer.writeline(
                        "_mako_get_namespace(context, %r)."
                        "_populate(_import_ns, %r)"
                        % (
                            ident,
                            re.split(r"\s*,\s*", ns.attributes["import"]),
                        )
                    )
        if has_loop:
            self.printer.writeline("loop = __M_loop = runtime.LoopStack()")
        for ident in to_write:
            if ident in comp_idents:
                # name refers to a def: top-level defs get a stub that
                # forwards to the module-level render_<name>; nested defs
                # are emitted inline as closures
                comp = comp_idents[ident]
                if comp.is_block:
                    if not comp.is_anonymous:
                        self.write_def_decl(comp, identifiers)
                    else:
                        self.write_inline_def(comp, identifiers, nested=True)
                else:
                    if comp.is_root():
                        self.write_def_decl(comp, identifiers)
                    else:
                        self.write_inline_def(comp, identifiers, nested=True)
            elif ident in self.compiler.namespaces:
                self.printer.writeline(
                    "%s = _mako_get_namespace(context, %r)" % (ident, ident)
                )
            else:
                # plain name: resolve through namespace imports and/or the
                # context, honoring strict_undefined semantics
                if getattr(self.compiler, "has_ns_imports", False):
                    if self.compiler.strict_undefined:
                        self.printer.writelines(
                            "%s = _import_ns.get(%r, UNDEFINED)"
                            % (ident, ident),
                            "if %s is UNDEFINED:" % ident,
                            "try:",
                            "%s = context[%r]" % (ident, ident),
                            "except KeyError:",
                            "raise NameError(\"'%s' is not defined\")" % ident,
                            None,
                            None,
                        )
                    else:
                        self.printer.writeline(
                            "%s = _import_ns.get"
                            "(%r, context.get(%r, UNDEFINED))"
                            % (ident, ident, ident)
                        )
                else:
                    if self.compiler.strict_undefined:
                        self.printer.writelines(
                            "try:",
                            "%s = context[%r]" % (ident, ident),
                            "except KeyError:",
                            "raise NameError(\"'%s' is not defined\")" % ident,
                            None,
                        )
                    else:
                        self.printer.writeline(
                            "%s = context.get(%r, UNDEFINED)" % (ident, ident)
                        )
        self.printer.writeline("__M_writer = context.writer()")
    def write_def_decl(self, node, identifiers):
        """write a locally-available callable referencing a top-level def"""
        funcname = node.funcname
        namedecls = node.get_argument_expressions()
        nameargs = node.get_argument_expressions(as_call=True)
        # in the main body, pass a context augmented with __M_locals so the
        # def can see locally assigned template variables
        if not self.in_def and (
            len(self.identifiers.locally_assigned) > 0
            or len(self.identifiers.argument_declared) > 0
        ):
            nameargs.insert(0, "context._locals(__M_locals)")
        else:
            nameargs.insert(0, "context")
        self.printer.writeline("def %s(%s):" % (funcname, ",".join(namedecls)))
        self.printer.writeline(
            "return render_%s(%s)" % (funcname, ",".join(nameargs))
        )
        self.printer.writeline(None)
    def write_inline_def(self, node, identifiers, nested):
        """write a locally-available def callable inside an enclosing def."""
        namedecls = node.get_argument_expressions()
        decorator = node.decorator
        if decorator:
            self.printer.writeline(
                "@runtime._decorate_inline(context, %s)" % decorator
            )
        self.printer.writeline(
            "def %s(%s):" % (node.funcname, ",".join(namedecls))
        )
        filtered = len(node.filter_args.args) > 0
        buffered = eval(node.attributes.get("buffered", "False"))
        cached = eval(node.attributes.get("cached", "False"))
        self.printer.writelines(
            # push new frame, assign current frame to __M_caller
            "__M_caller = context.caller_stack._push_frame()",
            "try:",
        )
        if buffered or filtered or cached:
            self.printer.writelines("context._push_buffer()")
        identifiers = identifiers.branch(node, nested=nested)
        self.write_variable_declares(identifiers)
        self.identifier_stack.append(identifiers)
        for n in node.nodes:
            n.accept_visitor(self)
        self.identifier_stack.pop()
        self.write_def_finish(node, buffered, filtered, cached)
        self.printer.writeline(None)
        if cached:
            self.write_cache_decorator(
                node,
                node.funcname,
                namedecls,
                False,
                identifiers,
                inline=True,
                toplevel=False,
            )
    def write_def_finish(
        self, node, buffered, filtered, cached, callstack=True
    ):
        """write the end section of a rendering function, either outermost or
        inline.
        this takes into account if the rendering function was filtered,
        buffered, etc. and closes the corresponding try: block if any, and
        writes code to retrieve captured content, apply filters, send proper
        return value."""
        if not buffered and not cached and not filtered:
            self.printer.writeline("return ''")
            if callstack:
                self.printer.writelines(
                    "finally:", "context.caller_stack._pop_frame()", None
                )
        if buffered or filtered or cached:
            if buffered or cached:
                # in a caching scenario, don't try to get a writer
                # from the context after popping; assume the caching
                # implementation might be using a context with no
                # extra buffers
                self.printer.writelines(
                    "finally:", "__M_buf = context._pop_buffer()"
                )
            else:
                self.printer.writelines(
                    "finally:",
                    "__M_buf, __M_writer = context._pop_buffer_and_writer()",
                )
            if callstack:
                self.printer.writeline("context.caller_stack._pop_frame()")
            s = "__M_buf.getvalue()"
            if filtered:
                s = self.create_filter_callable(
                    node.filter_args.args, s, False
                )
            self.printer.writeline(None)
            if buffered and not cached:
                s = self.create_filter_callable(
                    self.compiler.buffer_filters, s, False
                )
            if buffered or cached:
                self.printer.writeline("return %s" % s)
            else:
                self.printer.writelines("__M_writer(%s)" % s, "return ''")
    def write_cache_decorator(
        self,
        node_or_pagetag,
        name,
        args,
        buffered,
        identifiers,
        inline=False,
        toplevel=False,
    ):
        """write a post-function decorator to replace a rendering
        callable with a cached version of itself."""
        # stash the original callable under __M_<name>; the replacement
        # defined below delegates to it through the cache
        self.printer.writeline("__M_%s = %s" % (name, name))
        cachekey = node_or_pagetag.parsed_attributes.get(
            "cache_key", repr(name)
        )
        cache_args = {}
        # cache_* attributes from <%page> provide defaults, overridden by
        # the tag's own cache_* attributes
        if self.compiler.pagetag is not None:
            cache_args.update(
                (pa[6:], self.compiler.pagetag.parsed_attributes[pa])
                for pa in self.compiler.pagetag.parsed_attributes
                if pa.startswith("cache_") and pa != "cache_key"
            )
        cache_args.update(
            (pa[6:], node_or_pagetag.parsed_attributes[pa])
            for pa in node_or_pagetag.parsed_attributes
            if pa.startswith("cache_") and pa != "cache_key"
        )
        if "timeout" in cache_args:
            cache_args["timeout"] = int(eval(cache_args["timeout"]))
        self.printer.writeline("def %s(%s):" % (name, ",".join(args)))
        # form "arg1, arg2, arg3=arg3, arg4=arg4", etc.
        pass_args = [
            "%s=%s" % ((a.split("=")[0],) * 2) if "=" in a else a for a in args
        ]
        self.write_variable_declares(
            identifiers,
            toplevel=toplevel,
            limit=node_or_pagetag.undeclared_identifiers(),
        )
        if buffered:
            s = (
                "context.get('local')."
                "cache._ctx_get_or_create("
                "%s, lambda:__M_%s(%s), context, %s__M_defname=%r)"
                % (
                    cachekey,
                    name,
                    ",".join(pass_args),
                    "".join(
                        ["%s=%s, " % (k, v) for k, v in cache_args.items()]
                    ),
                    name,
                )
            )
            # apply buffer_filters
            s = self.create_filter_callable(
                self.compiler.buffer_filters, s, False
            )
            self.printer.writelines("return " + s, None)
        else:
            self.printer.writelines(
                "__M_writer(context.get('local')."
                "cache._ctx_get_or_create("
                "%s, lambda:__M_%s(%s), context, %s__M_defname=%r))"
                % (
                    cachekey,
                    name,
                    ",".join(pass_args),
                    "".join(
                        ["%s=%s, " % (k, v) for k, v in cache_args.items()]
                    ),
                    name,
                ),
                "return ''",
                None,
            )
    def create_filter_callable(self, args, target, is_expression):
        """write a filter-applying expression based on the filters
        present in the given filter names, adjusting for the global
        'default' filter aliases as needed.

        Returns the ``target`` expression wrapped in nested filter calls,
        innermost filter applied first.
        """
        def locate_encode(name):
            # map well-known filter aliases (u, h, x, ...) to their
            # implementations in mako.filters; pass others through as-is
            if re.match(r"decode\..+", name):
                return "filters." + name
            elif self.compiler.disable_unicode:
                return filters.NON_UNICODE_ESCAPES.get(name, name)
            else:
                return filters.DEFAULT_ESCAPES.get(name, name)
        # the "n" pseudo-filter disables page-level and default filters
        if "n" not in args:
            if is_expression:
                if self.compiler.pagetag:
                    args = self.compiler.pagetag.filter_args.args + args
                if self.compiler.default_filters and "n" not in args:
                    args = self.compiler.default_filters + args
        for e in args:
            # if filter given as a function, get just the identifier portion
            if e == "n":
                continue
            m = re.match(r"(.+?)(\(.*\))", e)
            if m:
                ident, fargs = m.group(1, 2)
                f = locate_encode(ident)
                e = f + fargs
            else:
                e = locate_encode(e)
                assert e is not None
            target = "%s(%s)" % (e, target)
        return target
    def visitExpression(self, node):
        """Write an ${...} expression, applying escape/default filters."""
        self.printer.start_source(node.lineno)
        if (
            len(node.escapes)
            or (
                self.compiler.pagetag is not None
                and len(self.compiler.pagetag.filter_args.args)
            )
            or len(self.compiler.default_filters)
        ):
            s = self.create_filter_callable(
                node.escapes_code.args, "%s" % node.text, True
            )
            self.printer.writeline("__M_writer(%s)" % s)
        else:
            # no filters apply; write the expression result directly
            self.printer.writeline("__M_writer(%s)" % node.text)
    def visitControlLine(self, node):
        """Write a % control line (if/for/while/...), or close its block."""
        if node.isend:
            self.printer.writeline(None)
            if node.has_loop_context:
                # close out the runtime loop-context stack for this "for"
                self.printer.writeline("finally:")
                self.printer.writeline("loop = __M_loop._exit()")
                self.printer.writeline(None)
        else:
            self.printer.start_source(node.lineno)
            if self.compiler.enable_loop and node.keyword == "for":
                text = mangle_mako_loop(node, self.printer)
            else:
                text = node.text
            self.printer.writeline(text)
            children = node.get_children()
            # this covers the three situations where we want to insert a pass:
            # 1) a ternary control line with no children,
            # 2) a primary control line with nothing but its own ternary
            # and end control lines, and
            # 3) any control line with no content other than comments
            if not children or (
                compat.all(
                    isinstance(c, (parsetree.Comment, parsetree.ControlLine))
                    for c in children
                )
                and compat.all(
                    (node.is_ternary(c.keyword) or c.isend)
                    for c in children
                    if isinstance(c, parsetree.ControlLine)
                )
            ):
                self.printer.writeline("pass")
def visitText(self, node):
self.printer.start_source(node.lineno)
self.printer.writeline("__M_writer(%s)" % repr(node.content))
    def visitTextTag(self, node):
        """Write a <%text> tag body, capturing it when filters are present."""
        filtered = len(node.filter_args.args) > 0
        if filtered:
            # redirect writes into a capture buffer so the filter can be
            # applied to the accumulated text afterwards
            self.printer.writelines(
                "__M_writer = context._push_writer()", "try:"
            )
        for n in node.nodes:
            n.accept_visitor(self)
        if filtered:
            self.printer.writelines(
                "finally:",
                "__M_buf, __M_writer = context._pop_buffer_and_writer()",
                "__M_writer(%s)"
                % self.create_filter_callable(
                    node.filter_args.args, "__M_buf.getvalue()", False
                ),
                None,
            )
    def visitCode(self, node):
        """Write an inline <% %> code block (module-level <%! %> blocks are
        emitted separately by write_module_code())."""
        if not node.ismodule:
            self.printer.write_indented_block(
                node.text, starting_lineno=node.lineno
            )
            if not self.in_def and len(self.identifiers.locally_assigned) > 0:
                # if we are the "template" def, fudge locally
                # declared/modified variables into the "__M_locals" dictionary,
                # which is used for def calls within the same template,
                # to simulate "enclosing scope"
                self.printer.writeline(
                    "__M_locals_builtin_stored = __M_locals_builtin()"
                )
                self.printer.writeline(
                    "__M_locals.update(__M_dict_builtin([(__M_key,"
                    " __M_locals_builtin_stored[__M_key]) for __M_key in"
                    " [%s] if __M_key in __M_locals_builtin_stored]))"
                    % ",".join([repr(x) for x in node.declared_identifiers()])
                )
def visitIncludeTag(self, node):
self.printer.start_source(node.lineno)
args = node.attributes.get("args")
if args:
self.printer.writeline(
"runtime._include_file(context, %s, _template_uri, %s)"
% (node.parsed_attributes["file"], args)
)
else:
self.printer.writeline(
"runtime._include_file(context, %s, _template_uri)"
% (node.parsed_attributes["file"])
)
    def visitNamespaceTag(self, node):
        # namespaces are handled up front by write_toplevel()/write_namespaces()
        pass
    def visitDefTag(self, node):
        # defs are emitted via write_def_decl()/write_inline_def() from
        # write_variable_declares(), not while walking the body
        pass
    def visitBlockTag(self, node):
        """Write the invocation of a <%block>'s render function in place."""
        if node.is_anonymous:
            self.printer.writeline("%s()" % node.funcname)
        else:
            # named block: only invoke here if no parent template overrides it
            nameargs = node.get_argument_expressions(as_call=True)
            nameargs += ["**pageargs"]
            self.printer.writeline(
                "if 'parent' not in context._data or "
                "not hasattr(context._data['parent'], '%s'):" % node.funcname
            )
            self.printer.writeline(
                "context['self'].%s(%s)" % (node.funcname, ",".join(nameargs))
            )
            self.printer.writeline("\n")
    def visitCallNamespaceTag(self, node):
        """A <%namespacename:defname> tag is generated like a <%call> tag."""
        # TODO: we can put namespace-specific checks here, such
        # as ensure the given namespace will be imported,
        # pre-import the namespace, etc.
        self.visitCallTag(node)
    def visitCallTag(self, node):
        """Write a <%call> tag: a local ccall() closure providing body()
        plus any nested defs, handed to the called def as its caller."""
        self.printer.writeline("def ccall(caller):")
        export = ["body"]
        callable_identifiers = self.identifiers.branch(node, nested=True)
        body_identifiers = callable_identifiers.branch(node, nested=False)
        # we want the 'caller' passed to ccall to be used
        # for the body() function, but for other non-body()
        # <%def>s within <%call> we want the current caller
        # off the call stack (if any)
        body_identifiers.add_declared("caller")
        self.identifier_stack.append(body_identifiers)
        class DefVisitor(object):
            def visitDefTag(s, node):
                s.visitDefOrBase(node)
            def visitBlockTag(s, node):
                s.visitDefOrBase(node)
            def visitDefOrBase(s, node):
                self.write_inline_def(node, callable_identifiers, nested=False)
                if not node.is_anonymous:
                    export.append(node.funcname)
                # remove defs that are within the <%call> from the
                # "closuredefs" defined in the body, so they dont render twice
                if node.funcname in body_identifiers.closuredefs:
                    del body_identifiers.closuredefs[node.funcname]
        vis = DefVisitor()
        for n in node.nodes:
            n.accept_visitor(vis)
        self.identifier_stack.pop()
        bodyargs = node.body_decl.get_argument_expressions()
        self.printer.writeline("def body(%s):" % ",".join(bodyargs))
        # TODO: figure out best way to specify
        # buffering/nonbuffering (at call time would be better)
        buffered = False
        if buffered:
            self.printer.writelines("context._push_buffer()", "try:")
        self.write_variable_declares(body_identifiers)
        self.identifier_stack.append(body_identifiers)
        for n in node.nodes:
            n.accept_visitor(self)
        self.identifier_stack.pop()
        self.write_def_finish(node, buffered, False, False, callstack=False)
        self.printer.writelines(None, "return [%s]" % (",".join(export)), None)
        self.printer.writelines(
            # push on caller for nested call
            "context.caller_stack.nextcaller = "
            "runtime.Namespace('caller', context, "
            "callables=ccall(__M_caller))",
            "try:",
        )
        self.printer.start_source(node.lineno)
        self.printer.writelines(
            "__M_writer(%s)"
            % self.create_filter_callable([], node.expression, True),
            "finally:",
            "context.caller_stack.nextcaller = None",
            None,
        )
class _Identifiers(object):
    """tracks the status of identifier names as template code is rendered."""

    def __init__(self, compiler, node=None, parent=None, nested=False):
        if parent is not None:
            # if we are the branch created in write_namespaces(),
            # we don't share any context from the main body().
            if isinstance(node, parsetree.NamespaceTag):
                self.declared = set()
                self.topleveldefs = util.SetLikeDict()
            else:
                # things that have already been declared
                # in an enclosing namespace (i.e. names we can just use)
                self.declared = (
                    set(parent.declared)
                    .union([c.name for c in parent.closuredefs.values()])
                    .union(parent.locally_declared)
                    .union(parent.argument_declared)
                )

                # if these identifiers correspond to a "nested"
                # scope, it means whatever the parent identifiers
                # had as undeclared will have been declared by that parent,
                # and therefore we have them in our scope.
                if nested:
                    self.declared = self.declared.union(parent.undeclared)

                # top level defs that are available
                self.topleveldefs = util.SetLikeDict(**parent.topleveldefs)
        else:
            self.declared = set()
            self.topleveldefs = util.SetLikeDict()

        self.compiler = compiler

        # things within this level that are referenced before they
        # are declared (e.g. assigned to)
        self.undeclared = set()

        # things that are declared locally.  some of these things
        # could be in the "undeclared" list as well if they are
        # referenced before declared
        self.locally_declared = set()

        # assignments made in explicit python blocks.
        # these will be propagated to
        # the context of local def calls.
        self.locally_assigned = set()

        # things that are declared in the argument
        # signature of the def callable
        self.argument_declared = set()

        # closure defs that are defined in this level
        self.closuredefs = util.SetLikeDict()

        self.node = node

        if node is not None:
            node.accept_visitor(self)

        illegal_names = self.compiler.reserved_names.intersection(
            self.locally_declared
        )
        if illegal_names:
            raise exceptions.NameConflictError(
                "Reserved words declared in template: %s"
                % ", ".join(illegal_names)
            )

    def branch(self, node, **kwargs):
        """create a new Identifiers for a new Node, with
        this Identifiers as the parent."""
        return _Identifiers(self.compiler, node, self, **kwargs)

    @property
    def defs(self):
        # all defs visible at this level, both top-level and closure
        return set(self.topleveldefs.union(self.closuredefs).values())

    def __repr__(self):
        return (
            "Identifiers(declared=%r, locally_declared=%r, "
            "undeclared=%r, topleveldefs=%r, closuredefs=%r, "
            "argumentdeclared=%r)"
            % (
                list(self.declared),
                list(self.locally_declared),
                list(self.undeclared),
                [c.name for c in self.topleveldefs.values()],
                [c.name for c in self.closuredefs.values()],
                self.argument_declared,
            )
        )

    def _check_undeclared(self, node):
        """add to the "undeclared" set any identifiers the given node
        references that are not already visible in this scope.

        The reserved name 'context' is always available and is skipped.
        This consolidates a check that was previously duplicated across
        several of the visit methods below.
        """
        for ident in node.undeclared_identifiers():
            if ident != "context" and ident not in self.declared.union(
                self.locally_declared
            ):
                self.undeclared.add(ident)

    def check_declared(self, node):
        """update the state of this Identifiers with the undeclared
        and declared identifiers of the given node."""
        self._check_undeclared(node)
        for ident in node.declared_identifiers():
            self.locally_declared.add(ident)

    def add_declared(self, ident):
        # a newly-declared name is no longer "undeclared"
        self.declared.add(ident)
        if ident in self.undeclared:
            self.undeclared.remove(ident)

    def visitExpression(self, node):
        self.check_declared(node)

    def visitControlLine(self, node):
        self.check_declared(node)

    def visitCode(self, node):
        # module-level <%! %> blocks do not contribute to template-scope
        # identifiers
        if not node.ismodule:
            self.check_declared(node)
            self.locally_assigned = self.locally_assigned.union(
                node.declared_identifiers()
            )

    def visitNamespaceTag(self, node):
        # only traverse into the sub-elements of a
        # <%namespace> tag if we are the branch created in
        # write_namespaces()
        if self.node is node:
            for n in node.nodes:
                n.accept_visitor(self)

    def _check_name_exists(self, collection, node):
        # register the def/block in *collection*, raising if a conflicting
        # definition with the same funcname already exists
        existing = collection.get(node.funcname)
        collection[node.funcname] = node
        if (
            existing is not None
            and existing is not node
            and (node.is_block or existing.is_block)
        ):
            raise exceptions.CompileException(
                "%%def or %%block named '%s' already "
                "exists in this template." % node.funcname,
                **node.exception_kwargs
            )

    def visitDefTag(self, node):
        if node.is_root() and not node.is_anonymous:
            self._check_name_exists(self.topleveldefs, node)
        elif node is not self.node:
            self._check_name_exists(self.closuredefs, node)

        self._check_undeclared(node)

        # visit defs only one level deep
        if node is self.node:
            for ident in node.declared_identifiers():
                self.argument_declared.add(ident)

            for n in node.nodes:
                n.accept_visitor(self)

    def visitBlockTag(self, node):
        if node is not self.node and not node.is_anonymous:

            if isinstance(self.node, parsetree.DefTag):
                raise exceptions.CompileException(
                    "Named block '%s' not allowed inside of def '%s'"
                    % (node.name, self.node.name),
                    **node.exception_kwargs
                )
            elif isinstance(
                self.node, (parsetree.CallTag, parsetree.CallNamespaceTag)
            ):
                raise exceptions.CompileException(
                    "Named block '%s' not allowed inside of <%%call> tag"
                    % (node.name,),
                    **node.exception_kwargs
                )

        self._check_undeclared(node)

        if not node.is_anonymous:
            self._check_name_exists(self.topleveldefs, node)
            self.undeclared.add(node.funcname)
        elif node is not self.node:
            self._check_name_exists(self.closuredefs, node)
        for ident in node.declared_identifiers():
            self.argument_declared.add(ident)
        for n in node.nodes:
            n.accept_visitor(self)

    def visitTextTag(self, node):
        self._check_undeclared(node)

    def visitIncludeTag(self, node):
        self.check_declared(node)

    def visitPageTag(self, node):
        for ident in node.declared_identifiers():
            self.argument_declared.add(ident)
        self.check_declared(node)

    def visitCallNamespaceTag(self, node):
        # <%namespace:call> is treated identically to <%call>
        self.visitCallTag(node)

    def visitCallTag(self, node):
        if node is self.node:
            self._check_undeclared(node)
            for ident in node.declared_identifiers():
                self.argument_declared.add(ident)
            for n in node.nodes:
                n.accept_visitor(self)
        else:
            self._check_undeclared(node)
# Matches a template-level "for" control line such as
# "for x in seq:" or "for (a, b) in items:".  Group 1 captures the loop
# target list (possibly parenthesized), group 2 the iterable expression.
_FOR_LOOP = re.compile(
    r"^for\s+((?:\(?)\s*[A-Za-z_][A-Za-z_0-9]*"
    r"(?:\s*,\s*(?:[A-Za-z_][A-Za-z0-9_]*),??)*\s*(?:\)?))\s+in\s+(.*):"
)
def mangle_mako_loop(node, printer):
    """converts a for loop into a context manager wrapped around a for loop
    when access to the `loop` variable has been detected in the for loop body
    """
    detector = LoopVariable()
    node.accept_visitor(detector)
    if not detector.detected:
        # no reference to ``loop`` anywhere in the body; emit the control
        # line unchanged
        return node.text

    # flag the closing node so the generated code pops the loop context
    node.nodes[-1].has_loop_context = True
    match = _FOR_LOOP.match(node.text)
    if match is None:
        raise SyntaxError("Couldn't apply loop context: %s" % node.text)
    printer.writelines(
        "loop = __M_loop._enter(%s)" % match.group(2),
        "try:"
        # 'with __M_loop(%s) as loop:' % match.group(2)
    )
    return "for %s in loop:" % match.group(1)
class LoopVariable(object):
    """A node visitor which looks for the name 'loop' within undeclared
    identifiers."""

    def __init__(self):
        self.detected = False

    def _loop_reference_detected(self, node):
        # A direct reference ends the search for this subtree; otherwise
        # descend into the children looking for one.
        if "loop" in node.undeclared_identifiers():
            self.detected = True
            return
        for child in node.get_children():
            child.accept_visitor(self)

    def visitControlLine(self, node):
        self._loop_reference_detected(node)

    def visitCode(self, node):
        self._loop_reference_detected(node)

    def visitExpression(self, node):
        self._loop_reference_detected(node)
| wujuguang/mako | mako/codegen.py | Python | mit | 47,892 |
// Registers the translation table for a locale on the Xikitita object.
// A missing/falsy table is replaced by an empty one. Returns `this` so
// the call can be chained.
Xktta.I18n = function(locale, translations){
  if(!translations) translations = {};
  this.translations[locale] = translations;
  return this;
}
// Deferred setup: runs after Xikitita initialization so that the I18n
// global is installed on the application window.
Xktta.afterInit.push(function(){
  var __this__ = Xktta;
  // Declare I18n as a global "var" in the target window's scope.
  eval.call(__this__.window, "var I18n;");
  I18n = {
    locale: 'en',
    // Look up a dot-separated translation *path* for the current locale.
    // Returns the interpolated string, an array of translations, or the
    // "<locale>.<path>" fallback when nothing matches.
    translate: function(path, params){
      var translation = path;
      var translations = __this__.translations[I18n.locale] || {};
      // Walk down the nested translation table one key at a time;
      // unknown keys yield {} so the walk never throws.
      path.split('.').forEach(function(key){
        translations = translations[key] || {};
      });
      if(typeof translations === 'string'){
        translation = translations;
        params = params || {};
        // interpolate() is a custom String extension (defined elsewhere
        // in Xikitita); '%' is the placeholder delimiter — TODO confirm.
        translation = translation.interpolate(params, '%');
      }else if(typeof translations === 'object' && translations.constructor.name === 'Array'){
        translation = translations;
      } else {
        // Not found: return "locale.path" as a visible marker.
        translation = '#{locale}.#{path}'.interpolate({locale: I18n.locale, path: path});
      }
      return translation;
    },
    // Format a Date, number or boolean according to the locale's
    // format tables (options: format, dateType, forceDecimal).
    localize: function(value, options){
      options = options || {};
      var format = options.format || 'default';
      var dateType = options.dateType || 'date';
      var formatted = value;
      if(typeof value === 'object' && value.constructor.name === 'Date'){
        var dayWeak = value.getDay();
        // getMonth() is 0-based; +1 presumably matches 1-based locale
        // month tables (Rails-style) — TODO confirm table layout.
        var month = value.getMonth() + 1;
        var dayMonth = value.getDate();
        var year = value.getFullYear();
        var hours = value.getHours();
        var minutes = value.getMinutes();
        var seconds = value.getSeconds();
        var meridiem = hours < 12 ? 'am' : 'pm';
        // NOTE(review): match() returns null when the string carries no
        // "ABC+0000"-style zone, which would throw here — verify inputs.
        var zone = value.toString().match(/([A-Z]+[\+-][0-9]+.*)/)[1];
        formatted = __this__.translations[I18n.locale][dateType].formats[format];
        // The format entry may be a function (called directly) or a
        // pattern string (interpolated below).
        var formatBy = {
          function: function(){
            formatted = formatted(value);
          },
          string: function(){
            var to = {
              date: function(){
                formatted = formatted
                  .interpolate({
                    a: I18n.t('date.abbrDayNames')[dayWeak],
                    A: I18n.t('date.dayNames')[dayWeak],
                    // "+ 100 … substr(1)" zero-pads to two digits
                    m: new String(month + 100).toString().substr(1),
                    b: I18n.t('date.abbrMonthNames')[month],
                    B: I18n.t('date.monthNames')[month],
                    d: new String(dayMonth + 100).toString().substr(1),
                    Y: year
                  }, '%', false);
              },
              time: function(){
                formatted = formatted
                  .interpolate({
                    // NOTE(review): this 12-hour computation looks wrong
                    // for morning hours (e.g. 5 → "3"); it only pads
                    // correctly for hours >= 12 and midnight — verify.
                    h: new String( (hours || 24) - 12 + 100 ).toString().substr(1),
                    H: new String(hours + 100).toString().substr(1),
                    M: new String(minutes + 100).toString().substr(1),
                    S: new String(seconds + 100).toString().substr(1),
                    p: I18n.t(['time', meridiem].join('.')),
                    z: zone
                  }, '%', false);
              },
              datetime: function(){
                // apply both date and time placeholders to the pattern
                this.date();
                this.time();
              }
            }
            to[dateType]();
          }
        }
        formatBy[typeof formatted]();
      }
      else if(typeof value === 'number'){
        // integers and decimals use separate locale format functions
        var functionFormat = __this__.translations[I18n.locale].integer.formats[format];
        if(/\./.test(value) || options.forceDecimal){
          functionFormat = __this__.translations[I18n.locale].decimal.formats[format];
        }
        formatted = functionFormat(value);
      }
      else if(typeof value === 'boolean'){
        formatted = __this__.translations[I18n.locale].logic.formats[format][value];
      }
      return formatted;
    }
  }
  // Rails-style shorthands.
  I18n.t = I18n.translate
  I18n.l = I18n.localize
});
#!/usr/bin/env python

# Minimal circuits.web example: a JSONController serializes the returned
# dict to a JSON response body.
from circuits.web import Server, JSONController


class Root(JSONController):

    # Handles "/"; the returned dict is sent as a JSON document.
    def index(self):
        return {"success": True, "message": "Hello World!"}


# Listen on all interfaces, port 8000, and run the event loop.
app = Server(("0.0.0.0", 8000))
Root().register(app)
app.run()
| nizox/circuits | examples/web/jsoncontroller.py | Python | mit | 247 |
//
// Copyright (C) 2014, 2015 Ableton AG, Berlin. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
// THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
//
/*!
* @file
*/
#pragma once
#include <atria/estd/type_traits.hpp>
#include <atria/meta/utils.hpp>
#include <utility>
namespace atria {
namespace funken {
namespace detail {
/*!
 * Provides access to the underlying signals of different
 * entities.  This encapsulates access to the implementation of
 * signal-based objects: don't make access to signals public, instead
 * friend this class.
 */
class access
{
public:
  /*!
   * Returns a smart pointer to the underlying root signal or signals
   * of an object, if they exist.
   */
  // ABL_DECLTYPE_RETURN presumably expands to both the trailing return
  // type and the forwarding body -- see atria/meta/utils.hpp.
  template<typename T>
  static auto roots(T&& object)
    -> ABL_DECLTYPE_RETURN(
      std::forward<T>(object).roots())

  /*!
   * Returns a pointer to the underlying signal of an object, if it
   * exists.
   */
  template<typename T>
  static auto signal(T&& object)
    -> ABL_DECLTYPE_RETURN(
      std::forward<T>(object).signal())

  /*!
   * Returns an optional boost.signal to the specific watchers of
   * the underlying signal of an object.
   */
  template<typename T>
  static auto watchers(T&& object)
    -> ABL_DECLTYPE_RETURN(
      std::forward<T>(object).watchers())
};
/*!
 * Metafunction yielding the signal type for an object: the decayed
 * `element_type` of the smart pointer returned by `access::signal()`.
 */
template <typename ObjectT>
struct signal_type
{
  using type =
    estd::decay_t<
      typename estd::decay_t<decltype(
        access::signal(std::declval<ObjectT>())
      )>::element_type
    >;
};

//! Convenience alias for `signal_type<ObjectT>::type`.
template <typename ObjectT>
using signal_type_t = typename signal_type<ObjectT>::type;
} // namespace detail
} // namespace funken
} // namespace atria
| dodheim/atria | src/atria/funken/detail/access.hpp | C++ | mit | 2,682 |
import Ember from 'ember';

// NOTE: This file is the precompiled output of the Ember Handlebars
// template compiler (the "users/index" template).  Do not edit it by
// hand; change the source .hbs template and recompile instead.
export default Ember.Handlebars.template(function anonymous(Handlebars,depth0,helpers,partials,data) {
this.compilerInfo = [4,'>= 1.0.0'];
helpers = this.merge(helpers, Ember.Handlebars.helpers); data = data || {};
  var buffer = '', stack1, helperMissing=helpers.helperMissing, escapeExpression=this.escapeExpression, self=this;

// Renders one row per user: a glyphicon plus a link-to the user's page.
function program1(depth0,data) {
  
  var buffer = '', helper, options;
  data.buffer.push("\n  <div class=\"row\">\n    <div class=\"col-md-4\"> <span class=\"glyphicon glyphicon-user\"></span> ");
  data.buffer.push(escapeExpression((helper = helpers['link-to'] || (depth0 && depth0['link-to']),options={hash:{},hashTypes:{},hashContexts:{},contexts:[depth0,depth0,depth0],types:["ID","STRING","ID"],data:data},helper ? helper.call(depth0, "user.name", "users.user", "user", options) : helperMissing.call(depth0, "link-to", "user.name", "users.user", "user", options))));
  data.buffer.push("</div>\n  </div>\n  ");
  return buffer;
  }

// Outer template body: heading plus an {{#each user in content}} loop.
  data.buffer.push("<h2>Buddies</h2>\n\n<div class=\"container\">\n  ");
  stack1 = helpers.each.call(depth0, "user", "in", "content", {hash:{},hashTypes:{},hashContexts:{},inverse:self.noop,fn:self.program(1, program1, data),contexts:[depth0,depth0,depth0],types:["ID","ID","ID"],data:data});
  if(stack1 || stack1 === 0) { data.buffer.push(stack1); }
  data.buffer.push("\n\n\n</div>\n");
  return buffer;
  
});
| alicht/buddybuddy | public/tmp/tree_merger-tmp_dest_dir-KPkHvOkI.tmp/buddybuddy/templates/users/index.js | JavaScript | mit | 1,413 |
exports.defaultType = require('ot-json0').type;
exports.map = {};
exports.register = function(type) {
if (type.name) exports.map[type.name] = type;
if (type.uri) exports.map[type.uri] = type;
};
exports.register(exports.defaultType);
| share/livedb | lib/types.js | JavaScript | mit | 242 |
using System;
using System.Threading;
using Titanium.Web.Proxy.Helpers;
using Titanium.Web.Proxy.Http;
using Titanium.Web.Proxy.Models;
using Titanium.Web.Proxy.StreamExtended.Network;
namespace Titanium.Web.Proxy.EventArguments
{
    /// <summary>
    ///     Wraps the state associated with a tunnel connect event on an Explicit endpoint.
    /// </summary>
    public class TunnelConnectSessionEventArgs : SessionEventArgsBase
    {
        private bool? isHttpsConnect;

        internal TunnelConnectSessionEventArgs(ProxyServer server, ProxyEndPoint endPoint, ConnectRequest connectRequest,
            HttpClientStream clientStream, CancellationTokenSource cancellationTokenSource)
            : base(server, endPoint, clientStream, connectRequest, connectRequest, cancellationTokenSource)
        {
        }

        /// <summary>
        ///     Should we decrypt the Ssl or relay it to server?
        ///     Default is true.
        /// </summary>
        public bool DecryptSsl { get; set; } = true;

        /// <summary>
        ///     When set to true it denies the connect request with a Forbidden status.
        /// </summary>
        public bool DenyConnect { get; set; }

        /// <summary>
        ///     Is this a connect request to secure HTTP server? Or is it to some other protocol.
        /// </summary>
        public bool IsHttpsConnect
        {
            get
            {
                // The flag is only populated once the connect response has been
                // inspected; reading it earlier is a usage error.
                if (!isHttpsConnect.HasValue)
                {
                    throw new Exception("The value of this property is known in the BeforeTunnelConnectResponse event");
                }

                return isHttpsConnect.Value;
            }
            internal set => isHttpsConnect = value;
        }

        /// <summary>
        ///     Fired when decrypted data is sent within this session to server/client.
        /// </summary>
        public event EventHandler<DataEventArgs>? DecryptedDataSent;

        /// <summary>
        ///     Fired when decrypted data is received within this session from client/server.
        /// </summary>
        public event EventHandler<DataEventArgs>? DecryptedDataReceived;

        internal void OnDecryptedDataSent(byte[] buffer, int offset, int count)
        {
            // Snapshot the delegate so a concurrent unsubscribe cannot race us.
            var handler = DecryptedDataSent;
            if (handler == null)
            {
                return;
            }

            try
            {
                handler(this, new DataEventArgs(buffer, offset, count));
            }
            catch (Exception ex)
            {
                // User handlers must never take down the proxy; report and continue.
                ExceptionFunc(new Exception("Exception thrown in user event", ex));
            }
        }

        internal void OnDecryptedDataReceived(byte[] buffer, int offset, int count)
        {
            var handler = DecryptedDataReceived;
            if (handler == null)
            {
                return;
            }

            try
            {
                handler(this, new DataEventArgs(buffer, offset, count));
            }
            catch (Exception ex)
            {
                ExceptionFunc(new Exception("Exception thrown in user event", ex));
            }
        }
    }
}
| titanium007/Titanium | src/Titanium.Web.Proxy/EventArguments/TunnelConnectEventArgs.cs | C# | mit | 2,809 |
from __future__ import print_function
import time
import pickle
import time
import numpy as np
import scipy.optimize, scipy.ndimage
from acq4.util import Qt
import acq4.pyqtgraph as pg
from acq4.Manager import getManager
class PipetteTracker(object):
"""Provides functionality for automated tracking and recalibration of pipette tip position
based on camera feedback.
The current implementation uses normalized cross-correlation to do template matching against
a stack of reference images collected with `takeReferenceFrames()`.
"""
def __init__(self, pipette):
self.dev = pipette
fileName = self.dev.configFileName('ref_frames.pk')
try:
self.reference = pickle.load(open(fileName, 'rb'))
except Exception:
self.reference = {}
def takeFrame(self, imager=None):
"""Acquire one frame from an imaging device.
This method guarantees that the frame is exposed *after* this method is called.
"""
imager = self._getImager(imager)
restart = False
if imager.isRunning():
restart = True
imager.stop()
frame = imager.acquireFrames(1)
if restart:
imager.start()
return frame
    def getNextFrame(self, imager=None):
        """Return the next frame available from the imager. 

        Note: the frame may have been exposed before this method was called.
        Waits up to 5 seconds, pumping the Qt event loop; raises
        RuntimeError on timeout.
        """
        imager = self._getImager(imager)
        # Name-mangled attribute (_PipetteTracker__nextFrame) used as a
        # mailbox between the signal callback and the polling loop below.
        self.__nextFrame = None

        def newFrame(newFrame):
            self.__nextFrame = newFrame

        imager.sigNewFrame.connect(newFrame)
        try:
            start = pg.ptime.time()
            # Poll for up to 5 s; processEvents() lets the frame signal be
            # delivered while we wait.
            while pg.ptime.time() < start + 5.0:
                Qt.QApplication.processEvents()
                frame = self.__nextFrame
                if frame is not None:
                    self.__nextFrame = None
                    return frame
                time.sleep(0.01)
            raise RuntimeError("Did not receive frame from imager.")
        finally:
            # Always detach the temporary slot, even on timeout/exception.
            pg.disconnect(imager.sigNewFrame, newFrame)
def _getImager(self, imager=None):
if imager is None:
imager = 'Camera'
if isinstance(imager, str):
man = getManager()
imager = man.getDevice('Camera')
return imager
    def getTipImageArea(self, frame, padding, pos=None, tipLength=None):
        """Generate coordinates needed to clip a camera frame to include just the
        tip of the pipette and some padding.

        By default, images will include the tip of the pipette to a length of 100 pixels.

        *padding* is a physical distance (meters); *pos* optionally overrides
        the calibrated global tip position, and *tipLength* the length of
        pipette (meters) to include.

        Return a tuple (minImgPos, maxImgPos, tipRelPos), where the first two
        items are (x,y) coordinate pairs giving the corners of the image region to 
        be extracted, and tipRelPos is the subpixel location of the pipette tip
        within this region.
        """
        img = frame.data()
        if img.ndim == 3:
            img = img[0]

        if tipLength is None:
            tipLength = self.suggestTipLength(frame)

        # determine bounding rectangle that we would like to acquire from the tip
        if pos is not None:
            tipPos = pos
        else:
            tipPos = self.dev.globalPosition()
        tipPos = np.array([tipPos[0], tipPos[1]])
        angle = self.dev.getYawAngle() * np.pi / 180.
        da = 10 * np.pi / 180  # half-angle of the tip

        pxw = frame.info()['pixelSize'][0]
        # compute back points of a triangle that circumscribes the tip
        backPos1 = np.array([-tipLength * np.cos(angle+da), -tipLength * np.sin(angle+da)])
        backPos2 = np.array([-tipLength * np.cos(angle-da), -tipLength * np.sin(angle-da)])

        # convert to image coordinates
        tr = frame.globalTransform().inverted()[0]
        originImgPos = tr.map(pg.Vector([0, 0]))
        backImgPos1 = tr.map(pg.Vector(backPos1)) - originImgPos
        backImgPos2 = tr.map(pg.Vector(backPos2)) - originImgPos
        backImgPos1 = np.array([backImgPos1.x(), backImgPos1.y()])
        backImgPos2 = np.array([backImgPos2.x(), backImgPos2.y()])

        # Pixel positions of bounding corners in the image relative to tip, including padding.
        # Note this is all calculated without actual tip position; this ensures the image
        # size is constant even as the tip moves.
        allPos = np.vstack([[0, 0], backImgPos1, backImgPos2]).astype('int')
        padding = int(padding / pxw)
        minRelPos = allPos.min(axis=0) - padding
        maxRelPos = allPos.max(axis=0) + padding

        # Get absolute pixel position of tip within image
        tipImgPos = tr.map(pg.Vector(tipPos))
        tipImgPos = np.array([tipImgPos.x(), tipImgPos.y()])
        tipImgPx = tipImgPos.astype('int')

        # clip bounding coordinates so the region stays inside the frame
        minRelPos = [np.clip(minRelPos[0], -tipImgPx[0], img.shape[0]-1-tipImgPx[0]),
                     np.clip(minRelPos[1], -tipImgPx[1], img.shape[1]-1-tipImgPx[1])]
        maxRelPos = [np.clip(maxRelPos[0], -tipImgPx[0], img.shape[0]-1-tipImgPx[0]),
                     np.clip(maxRelPos[1], -tipImgPx[1], img.shape[1]-1-tipImgPx[1])]

        # absolute image coordinates of bounding rect
        minImgPos = tipImgPx + minRelPos
        maxImgPos = tipImgPx + maxRelPos

        if np.any(maxImgPos - minImgPos < 1):
            raise RuntimeError("No part of tip overlaps with camera frame.")

        # subpixel location of tip within image
        tipRelPos = tipImgPos - tipImgPx - minRelPos

        return minImgPos, maxImgPos, tipRelPos
def takeTipImage(self, padding=50e-6):
"""Acquire an image of the pipette tip plus some padding.
Return a tuple (image, tipPosition).
"""
frame = self.takeFrame()
minImgPos, maxImgPos, tipRelPos = self.getTipImageArea(frame, padding)
# clipped image region
subimg = frame.data()[0, minImgPos[0]:maxImgPos[0], minImgPos[1]:maxImgPos[1]]
return subimg, tipRelPos
def suggestTipLength(self, frame):
# return a suggested tip length to image, given the image resolution
# currently just returns the length of 100 pixels in the frame
return frame.info()['pixelSize'][0] * 100
    def takeReferenceFrames(self, zRange=None, zStep=None, imager=None, average=8, tipLength=None):
        """Collect a series of images of the pipette tip at various focal depths.

        The collected images are used as reference templates for determining the most likely location 
        and focal depth of the tip after the calibration is no longer valid.

        The focus first is moved in +z by half of *zRange*, then stepped downward by *zStep* until the
        entire *zRange* is covered. Images of the pipette tip are acquired and stored at each step.

        This method assumes that the tip is in focus near the center of the camera frame, and that its
        position is well-calibrated. Ideally, the illumination is flat and the area surrounding the tip
        is free of any artifacts.

        Images are filtered using `self.filterImage` before they are stored.
        """
        imager = self._getImager(imager)

        # Take an initial frame with the tip in focus.
        centerFrame = self.takeFrame()

        if tipLength is None:
            tipLength = self.suggestTipLength(centerFrame)

        if zRange is None:
            zRange = tipLength*1.5
        if zStep is None:
            zStep = zRange / 30.

        minImgPos, maxImgPos, tipRelPos = self.getTipImageArea(centerFrame, padding=tipLength*0.15, tipLength=tipLength)
        center = centerFrame.data()[0, minImgPos[0]:maxImgPos[0], minImgPos[1]:maxImgPos[1]]
        center = self.filterImage(center)

        # Decide how many frames to collect and at what z depths
        # (forced to an even count so the stack is symmetric about focus)
        nFrames = (int(zRange / zStep) // 2) * 2
        pos = self.dev.globalPosition()
        zStart = pos[2] + zStep * (nFrames // 2)
        frames = []
        bg_frames = []
        corr = []

        print("Collecting %d frames of %0.2fum tip length at %0.2fum resolution." % (nFrames, tipLength*1e6, zStep*1e6))

        # Stop camera if it is currently running
        restart = False
        if imager.isRunning():
            restart = True
            imager.stop()

        try:
            with pg.ProgressDialog('Acquiring reference frames...', 0, nFrames*2+1) as dlg:
                # collect 2 stacks of images (second stack is for background subtraction)
                for j in range(2):
                    # Set initial focus above start point to reduce hysteresis in focus mechanism
                    scope = self.dev.scopeDevice()
                    scope.setFocusDepth(zStart + 10e-6)

                    # Acquire multiple frames at different depths
                    for i in range(nFrames):
                        #pos[2] = zStart - zStep * i
                        # self.dev._moveToGlobal(pos, 'slow').wait()
                        scope.setFocusDepth(zStart - zStep * i).wait()
                        frame = imager.acquireFrames(average)
                        # average the burst, crop to the tip region, filter
                        img = frame.data()[:, minImgPos[0]:maxImgPos[0], minImgPos[1]:maxImgPos[1]].astype(float).mean(axis=0)
                        img = self.filterImage(img)
                        if j == 0:
                            frames.append(img)
                            # correlation against the in-focus image is used
                            # later to find the best-focused frame index
                            corr.append(self._matchTemplateSingle(img, center)[1])
                        else:
                            bg_frames.append(img)
                        dlg += 1
                        if dlg.wasCanceled():
                            return

                    if j == 0:
                        # move tip out-of-frame to collect background images
                        self.dev._moveToLocal([-tipLength*3, 0, 0], 'slow').wait()
                    else:
                        self.dev._moveToLocal([tipLength*3, 0, 0], 'slow')

        finally:
            # restart camera if it was running
            if restart:
                imager.start()
            scope.setFocusDepth(pos[2])

        # find the index of the frame that most closely matches the initial, tip-focused frame
        maxInd = np.argmax(corr)

        # stack all frames into a 3D array
        frames = np.dstack(frames).transpose((2, 0, 1))
        bg_frames = np.dstack(bg_frames).transpose((2, 0, 1))

        # subtract background
        # frames -= bg_frame.data()

        # generate downsampled frame versions
        # (for now we generate these on the fly..)
        # ds = [frames] + [pg.downsample(pg.downsample(frames, n, axis=1), n, axis=2) for n in [2, 4, 8]]

        key = imager.getDeviceStateKey()
        self.reference[key] = {
            'frames': frames - bg_frames,
            'zStep': zStep,
            'centerInd': maxInd,
            'centerPos': tipRelPos,
            'pixelSize': frame.info()['pixelSize'],
            'tipLength': tipLength,
            # 'downsampledFrames' = ds,
        }

        # Store with pickle because configfile does not support arrays
        # NOTE(review): the file handle opened here is never explicitly
        # closed; consider a `with open(...)` block.
        pickle.dump(self.reference, open(self.dev.configFileName('ref_frames.pk'), 'wb'))
    def measureTipPosition(self, padding=50e-6, threshold=0.7, frame=None, pos=None, tipLength=None, show=False):
        """Find the pipette tip location by template matching within a region surrounding the
        expected tip position.

        *padding* is the physical margin (m) around the expected tip position;
        *frame* may be an existing frame, None (acquire a new one), or the
        string 'next' (wait for the imager's next frame); *pos* overrides the
        calibrated tip position; *show* plots per-depth match results.

        Return `((x, y, z), corr)`, where *corr* is the normalized cross-correlation value of
        the best template match.

        If the strength of the match is less than *threshold*, then raise RuntimeError.
        """
        # Grab one frame (if it is not already supplied) and crop it to the region around the pipette tip.
        if frame is None:
            frame = self.takeFrame()
        elif frame == 'next':
            frame = self.getNextFrame()

        # load up template images
        reference = self._getReference()

        if tipLength is None:
            # select a tip length similar to template images
            tipLength = reference['tipLength']

        minImgPos, maxImgPos, tipRelPos = self.getTipImageArea(frame, padding, pos=pos, tipLength=tipLength)
        img = frame.data()
        if img.ndim == 3:
            img = img[0]
        img = img[minImgPos[0]:maxImgPos[0], minImgPos[1]:maxImgPos[1]]
        img = self.filterImage(img)

        # resample acquired image to match template pixel size
        pxr = frame.info()['pixelSize'][0] / reference['pixelSize'][0]
        if pxr != 1.0:
            img = scipy.ndimage.zoom(img, pxr)

        # run template match against all template frames, find the frame with the strongest match
        match = [self.matchTemplate(img, t) for t in reference['frames']]
        if show:
            pg.plot([m[0][0] for m in match], title='x match vs z')
            pg.plot([m[0][1] for m in match], title='y match vs z')
            pg.plot([m[1] for m in match], title='match correlation vs z')

        maxInd = np.argmax([m[1] for m in match])
        if match[maxInd][1] < threshold:
            raise RuntimeError("Unable to locate pipette tip (correlation %0.2f < %0.2f)" % (match[maxInd][1], threshold))

        # measure z error: offset of the best-matching template frame from
        # the in-focus reference frame, scaled by the reference z spacing
        zErr = (maxInd - reference['centerInd']) * reference['zStep']

        # measure xy position: map the template-match offset (in template
        # pixels) back through the resampling ratio into frame coordinates
        offset = match[maxInd][0]
        tipImgPos = (minImgPos[0] + (offset[0] + reference['centerPos'][0]) / pxr,
                     minImgPos[1] + (offset[1] + reference['centerPos'][1]) / pxr)
        tipPos = frame.mapFromFrameToGlobal(pg.Vector(tipImgPos))
        return (tipPos.x(), tipPos.y(), tipPos.z() + zErr), match[maxInd][1]
def measureError(self, padding=50e-6, threshold=0.7, frame=None, pos=None):
"""Return an (x, y, z) tuple indicating the error vector from the calibrated tip position to the
measured (actual) tip position.
"""
if pos is None:
expectedTipPos = self.dev.globalPosition()
else:
expectedTipPos = pos
measuredTipPos, corr = self.measureTipPosition(padding, threshold, frame, pos=pos)
return tuple([measuredTipPos[i] - expectedTipPos[i] for i in (0, 1, 2)])
def _getReference(self):
key = self._getImager().getDeviceStateKey()
try:
return self.reference[key]
except KeyError:
raise Exception("No reference frames found for this pipette / objective combination.")
def autoCalibrate(self, **kwds):
"""Automatically calibrate the pipette tip position using template matching on a single camera frame.
Return the offset in pipette-local coordinates and the normalized cross-correlation value of the template match.
All keyword arguments are passed to `measureTipPosition()`.
"""
# If no image padding is given, then use the template tip length as a first guess
if 'padding' not in kwds:
ref = self._getReference()
kwds['padding'] = ref['tipLength']
if 'frame' not in kwds:
kwds['frame'] = 'next'
try:
tipPos, corr = self.measureTipPosition(**kwds)
except RuntimeError:
kwds['padding'] *= 2
tipPos, corr = self.measureTipPosition(**kwds)
localError = self.dev.mapFromGlobal(tipPos)
tr = self.dev.deviceTransform()
tr.translate(pg.Vector(localError))
self.dev.setDeviceTransform(tr)
return localError, corr
def filterImage(self, img):
"""Return a filtered version of an image to be used in template matching.
Currently, no filtering is applied.
"""
# Sobel should reduce background artifacts, but it also seems to increase the noise in the signal
# itself--two images with slightly different focus can have a very bad match.
# import skimage.feature
# return skimage.filter.sobel(img)
img = scipy.ndimage.morphological_gradient(img, size=(3, 3))
return img
    def matchTemplate(self, img, template, dsVals=(4, 2, 1)):
        """Match a template to image data.

        Return the (x, y) pixel offset of the template and a value indicating the strength of the match.

        For efficiency, the input images are downsampled and matched at low resolution before
        iteratively re-matching at higher resolutions. The *dsVals* argument lists the downsampling values
        that will be used, in order. Each value in this list must be an integer multiple of 
        the value that follows it.
        """
        # Recursively match at increasing image resolution
        imgDs = [pg.downsample(pg.downsample(img, n, axis=0), n, axis=1) for n in dsVals]
        tmpDs = [pg.downsample(pg.downsample(template, n, axis=0), n, axis=1) for n in dsVals]
        offset = np.array([0, 0])
        for i, ds in enumerate(dsVals):
            # single-resolution match at the current level
            pos, val = self._matchTemplateSingle(imgDs[i], tmpDs[i])
            pos = np.array(pos)
            if i == len(dsVals) - 1:
                # finest resolution reached; final answer
                offset += pos
                # [pg.image(imgDs[j], title=str(j)) for j in range(len(dsVals))]
                return offset, val
            else:
                # scale the coarse result up to the next resolution, then
                # crop the next image to a small neighborhood around the
                # match so the finer search is cheap
                scale = ds // dsVals[i+1]
                assert scale == ds / dsVals[i+1], "dsVals must satisfy constraint: dsVals[i] == dsVals[i+1] * int(x)"
                offset *= scale
                offset += np.clip(((pos-1) * scale), 0, imgDs[i+1].shape)
                end = offset + np.array(tmpDs[i+1].shape) + 3
                end = np.clip(end, 0, imgDs[i+1].shape)
                imgDs[i+1] = imgDs[i+1][offset[0]:end[0], offset[1]:end[1]]
    def _matchTemplateSingle(self, img, template, show=False, unsharp=3):
        """Normalized cross-correlation template match at a single resolution.

        Return ``(pos, val)`` where *pos* is the (row, col) index of the best
        match and *val* is the raw correlation value at that position.
        Raises ValueError if the template is larger than the image.
        """
        # imported lazily; skimage is only required when matching runs
        import skimage.feature
        if img.shape[0] < template.shape[0] or img.shape[1] < template.shape[1]:
            raise ValueError("Image must be larger than template. %s %s" % (img.shape, template.shape))
        cc = skimage.feature.match_template(img, template)
        # high-pass filter; we're looking for a fairly sharp peak.
        if unsharp is not False:
            cc_filt = cc - scipy.ndimage.gaussian_filter(cc, (unsharp, unsharp))
        else:
            cc_filt = cc

        if show:
            pg.image(cc)

        # locate the peak on the filtered map, but report the strength
        # from the unfiltered correlation map
        ind = np.argmax(cc_filt)
        pos = np.unravel_index(ind, cc.shape)
        val = cc[pos[0], pos[1]]
        return pos, val
def mapErrors(self, nSteps=(5, 5, 7), stepSize=(50e-6, 50e-6, 50e-6), padding=60e-6,
              threshold=0.4, speed='slow', show=False, intermediateDist=60e-6):
    """Move pipette tip randomly to locations in a grid and measure the position error
    at each location.

    All tip locations must be within the field of view.

    The collected map is stored in ``self.errorMap``, written to the
    device's ``error_map.np`` config file, and returned. Returns None if
    the user cancels the progress dialog.
    """
    startTime = time.time()
    start = np.array(self.dev.globalPosition())
    npts = nSteps[0] * nSteps[1] * nSteps[2]
    inds = np.mgrid[0:nSteps[0], 0:nSteps[1], 0:nSteps[2]].reshape((3, npts)).transpose()
    order = np.arange(npts)
    np.random.shuffle(order)

    err = np.zeros(nSteps + (3,))

    stepSize = np.array(stepSize)

    if show:
        # Markers overlaying the camera image: expected tip position (yellow)
        # and measured position (red).
        imv = pg.image()
        mark1 = Qt.QGraphicsEllipseItem(Qt.QRectF(-5, -5, 10, 10))
        mark1.setBrush(pg.mkBrush(255, 255, 0, 100))
        mark1.setZValue(100)
        imv.addItem(mark1)
        mark2 = Qt.QGraphicsEllipseItem(Qt.QRectF(-5, -5, 10, 10))
        mark2.setBrush(pg.mkBrush(255, 0, 0, 100))
        mark2.setZValue(100)
        imv.addItem(mark2)

    # loop over all points in random order, and such that we do heavy computation while
    # pipette is moving.
    offsets = []
    try:
        with pg.ProgressDialog("Acquiring error map...", 0, len(order)) as dlg:
            # One extra iteration: each pass measures the error from the
            # *previous* position while the next move is in flight.
            for i in range(len(order)+1):
                if i > 0:
                    lastPos = pos
                if i < len(order):
                    ind = inds[order[i]]
                    pos = start.copy() + (stepSize * ind)

                    # Jump to position + a random 20um offset to avoid hysteresis
                    offset = np.random.normal(size=3)
                    offset *= intermediateDist / (offset**2).sum()**0.5
                    offsets.append(offset)

                    mfut = self.dev._moveToGlobal(pos + offset, speed)
                    ffut = self.dev.scopeDevice().setFocusDepth(pos[2], speed)
                if i > 0:
                    # Measure error for the previous target using the frame
                    # captured at the end of the previous iteration.
                    ind = inds[order[i-1]]
                    print("Frame: %d %s" % (i-1, lastPos))
                    err[tuple(ind)] = self.measureError(padding=padding, threshold=threshold, frame=frame, pos=lastPos)
                    print(" error: %s" % err[tuple(ind)])
                    dlg += 1

                    if show:
                        imv.setImage(frame.data()[0])
                        p1 = frame.globalTransform().inverted()[0].map(pg.Vector(lastPos))
                        p2 = frame.globalTransform().inverted()[0].map(pg.Vector(lastPos + err[tuple(ind)]))
                        mark1.setPos(p1.x(), p1.y())
                        mark2.setPos(p2.x(), p2.y())

                # wait for previous moves to complete
                mfut.wait(updates=True)
                ffut.wait(updates=True)

                # step back to actual target position
                self.dev._moveToGlobal(pos, speed).wait(updates=True)

                frame = self.takeFrame()

                if dlg.wasCanceled():
                    return None
    finally:
        # Always return the pipette and the focus to the starting position.
        self.dev._moveToGlobal(start, 'fast')
        self.dev.scopeDevice().setFocusDepth(start[2], 'fast')

    self.errorMap = {
        'err': err,
        'nSteps': nSteps,
        'stepSize': stepSize,
        'order': order,
        'inds': inds,
        'offsets': offsets,
        'time': time.time() - startTime,
    }

    # Use a context manager so the file handle is closed deterministically
    # (previously an anonymous open() handle was passed to np.save and only
    # closed whenever it was garbage-collected).
    filename = self.dev.configFileName('error_map.np')
    with open(filename, 'wb') as fh:
        np.save(fh, self.errorMap)

    return self.errorMap
def showErrorAnalysis(self):
    """Display plots analyzing the error map produced by mapErrors().

    Loads the map from the device's ``error_map.np`` config file if
    mapErrors() has not been run in this session. References to the
    created windows are kept in ``self.errorMapAnalysis`` so they are not
    garbage-collected.
    """
    if not hasattr(self, 'errorMap'):
        filename = self.dev.configFileName('error_map.np')
        # Use a context manager so the file handle is closed deterministically
        # (previously an anonymous open() handle was passed to np.load).
        with open(filename, 'rb') as fh:
            # [np.newaxis][0] unwraps the 0-d object array back into a dict.
            self.errorMap = np.load(fh)[np.newaxis][0]

    err = self.errorMap
    imx = pg.image(err['err'][..., 0].transpose(1, 0, 2), title='X error')
    imy = pg.image(err['err'][..., 1], title='Y error')
    imz = pg.image(err['err'][..., 2], title='Z error')

    # get N,3 array of offset values used to randomize hysteresis
    off = np.vstack(err['offsets'])

    sh = err['err'].shape

    # Get N,3 array of measured position errors, in acquisition order
    errf = err['err'].reshape(sh[0]*sh[1]*sh[2], 3)[err['order']]

    # Display histogram of errors
    win = pg.GraphicsWindow(title="%s error" % self.dev.name())

    # subtract out slow drift
    normErr = errf - scipy.ndimage.gaussian_filter(errf, (20, 0))
    # calculate magnitude of error
    absErr = (normErr**2).sum(axis=1)**0.5

    # errPlot.plot(absErr)
    title = "Error Histogram (mean=%s)" % pg.siFormat(absErr.mean(), suffix='m')
    errPlot = win.addPlot(row=0, col=0, title=title, labels={'bottom': ('Position error', 'm')})
    hist = np.histogram(absErr, bins=50)
    errPlot.plot(hist[1], hist[0], stepMode=True)

    # display drift and hysteresis plots
    driftPlot = win.addPlot(row=0, col=1, rowspan=1, colspan=2, title="Pipette Drift",
                            labels={'left': ('Position error', 'm'), 'bottom': ('Time', 's')})
    driftPlot.plot(np.linspace(0, err['time'], errf.shape[0]), errf[:, 0], pen='r')
    driftPlot.plot(np.linspace(0, err['time'], errf.shape[0]), errf[:, 1], pen='g')
    driftPlot.plot(np.linspace(0, err['time'], errf.shape[0]), errf[:, 2], pen='b')

    xhplot = win.addPlot(row=1, col=0, title='X Hysteresis',
                         labels={'left': ('Position error', 'm'), 'bottom': ('Last pipette movement', 'm')})
    xhplot.plot(-off[:, 0], errf[:, 0], pen=None, symbol='o')
    yhplot = win.addPlot(row=1, col=1, title='Y Hysteresis',
                         labels={'left': ('Position error', 'm'), 'bottom': ('Last pipette movement', 'm')})
    yhplot.plot(-off[:, 1], errf[:, 1], pen=None, symbol='o')
    zhplot = win.addPlot(row=1, col=2, title='Z Hysteresis',
                         labels={'left': ('Position error', 'm'), 'bottom': ('Last pipette movement', 'm')})
    zhplot.plot(-off[:, 2], errf[:, 2], pen=None, symbol='o')

    # Print best fit for manipulator axes
    expPos = err['inds'] * err['stepSize']
    measPos = expPos + off

    def errFn(v):
        # v is a flattened 3x4 transform guess; compare predicted vs measured.
        return ((measPos - np.dot(expPos, v.reshape(3, 4))[:, :3])**2).sum()

    guess = np.array([[1, 0, 0, 0],
                      [0, 1, 0, 0],
                      [0, 0, 1, 0]], dtype='float')
    fit = scipy.optimize.minimize(errFn, guess)
    print("Pipette position transform:", fit)

    self.errorMapAnalysis = (imx, imy, imz, win)
class DriftMonitor(Qt.QWidget):
    """Window that periodically auto-calibrates a set of pipette trackers
    against incoming camera frames and plots the measured drift of each
    device over time.
    """
    def __init__(self, trackers):
        self.trackers = trackers
        # Most recent camera frame, delivered by newFrame() and consumed
        # by update().
        self.nextFrame = None

        Qt.QWidget.__init__(self)
        self.timer = Qt.QTimer()
        # NOTE(review): 'update' shadows Qt.QWidget.update (repaint); the
        # timer intentionally drives the override defined below — confirm.
        self.timer.timeout.connect(self.update)

        self.layout = Qt.QGridLayout()
        self.setLayout(self.layout)

        self.gv = pg.GraphicsLayoutWidget()
        self.layout.addWidget(self.gv, 0, 0)

        # Top plot: total drift distance per tracker; below: per-axis traces.
        self.plot = self.gv.addPlot(labels={'left': ('Drift distance', 'm'), 'bottom': ('Time', 's')})
        self.plot.addLegend()
        self.xplot = self.gv.addPlot(labels={'left': ('X position', 'm')}, row=1, col=0)
        self.yplot = self.gv.addPlot(labels={'left': ('Y position', 'm')}, row=2, col=0)
        self.zplot = self.gv.addPlot(labels={'left': ('Z position', 'm'), 'bottom': ('Time', 's')}, row=3, col=0)
        for plt in [self.xplot, self.yplot, self.zplot]:
            plt.setYRange(-10e-6, 10e-6)

        # One pen/curve per tracker, keyed by index.
        self.pens = [(i, len(trackers)) for i in range(len(trackers))]
        self.lines = [self.plot.plot(pen=self.pens[i], name=trackers[i].dev.name()) for i in range(len(trackers))]
        # self.errors = [[] for i in range(len(trackers))]
        # self.cumulative = np.zeros((len(trackers), 3))
        self.positions = []
        self.times = []

        # Poll every 2 seconds; frames arrive from the first tracker's imager.
        self.timer.start(2000)
        trackers[0]._getImager().sigNewFrame.connect(self.newFrame)
        self.show()

    def newFrame(self, frame):
        # Stash the latest frame; update() will pick it up on the next tick.
        self.nextFrame = frame

    def update(self):
        # Timer callback: auto-calibrate every tracker against the most
        # recent frame and refresh all plots.
        try:
            if self.nextFrame is None:
                return
            frame = self.nextFrame
            self.nextFrame = None

            self.times.append(time.time())
            x = np.array(self.times)
            x -= x[0]

            pos = []
            for i, t in enumerate(self.trackers):
                try:
                    err, corr = t.autoCalibrate(frame=frame, padding=50e-6)
                    # err = np.array(err)
                    # self.cumulative[i] += err
                    # err = (self.cumulative[i]**2).sum()**0.5
                    pos.append(t.dev.globalPosition())
                except RuntimeError:
                    # Calibration failed for this tracker; record NaN so the
                    # position arrays keep their shape.
                    pos.append([np.nan]*3)
                # self.errors[i].append(err)
            self.positions.append(pos)

            # Positions relative to the first sample; drift magnitude per tracker.
            pos = np.array(self.positions)
            pos -= pos[0]
            err = (pos**2).sum(axis=2)**0.5
            for i, t in enumerate(self.trackers):
                self.lines[i].setData(x, err[:, i])

            for ax, plt in enumerate([self.xplot, self.yplot, self.zplot]):
                plt.clear()
                for i, t in enumerate(self.trackers):
                    plt.plot(x, pos[:, i, ax], pen=self.pens[i])
        except Exception:
            # Stop polling so a persistent failure doesn't raise repeatedly.
            self.timer.stop()
            raise

    def closeEvent(self, event):
        self.timer.stop()
        return Qt.QWidget.closeEvent(self, event)
| campagnola/acq4 | acq4/devices/Pipette/tracker.py | Python | mit | 28,251 |
<?php

/**
 * Simplified Chinese (zh_CN) translations for the storage symlink
 * error/notification messages.
 */
return [
    'symlink_created_text' => '我们刚刚为您创建了缺失的软连接。',
    'symlink_created_title' => '丢失的存储软连接已被重新创建',
    'symlink_failed_text' => '我们未能为您的应用程序生成缺失的软连接,似乎您的主机提供商不支持它。',
    'symlink_failed_title' => '无法创建丢失的存储软连接',
    'symlink_missing_button' => '修复',
    'symlink_missing_text' => '我们找不到一个存储软连接,这可能会导致从浏览器加载媒体文件的问题。',
    'symlink_missing_title' => '缺失的存储软连接',
];
| handiwijoyo/voyager | publishable/lang/zh_CN/error.php | PHP | mit | 628 |
/**
* Copyright 2012-2017, Plotly, Inc.
* All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/

'use strict';

// Entry point for the standalone heatmapgl bundle: re-export the trace module.
module.exports = require('../src/traces/heatmapgl');
| chethanjjj/gentelella | vendors/plotlyjs/lib/heatmapgl.js | JavaScript | mit | 261 |
<?php
namespace Tiga\Framework\Response;
/**
 * Bridges a framework Response object into WordPress HTTP header handling.
 */
class Header
{
    /**
     * HTTP status code.
     *
     * @var int
     */
    protected $statusCode;

    /**
     * The response whose headers should be emitted, when one has been set.
     */
    protected $response;

    /**
     * Register the WordPress hook as soon as the object is created.
     */
    public function __construct()
    {
        $this->hook();

        return $this;
    }

    /**
     * Attach this instance to WordPress' status_header filter.
     */
    public function hook()
    {
        add_filter('status_header', [$this, 'sendHeaderResponse'], 100);
    }

    /**
     * Store the response instance whose headers will be sent.
     *
     * @param Response $response
     */
    public function setResponse($response)
    {
        $this->response = $response;
    }

    /**
     * Emit the response headers and return the status-code header string
     * for WordPress to use.
     */
    public function sendHeaderResponse()
    {
        if ($this->response != false) {
            $this->response->sendHeaders();

            return $this->response->getWpStatusCodeHeader();
        }
    }
}
| todiadiyatmo/tiga-framework | src/Response/Header.php | PHP | mit | 1,011 |
#region License
/*
The MIT License
Copyright (c) 2008 Sky Morey
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
#endregion
using System.Resources;
using System.Globalization;
namespace System.ComponentModel.DataAnnotations
{
    /// <summary>
    /// Provides access to localized resource strings used by the data
    /// annotations validation attributes.
    /// </summary>
    internal class DataAnnotationsResources
    {
        // Resources are looked up in the assembly that defines RequiredAttribute.
        private static readonly ResourceManager _resourceManager = new ResourceManager("System.ComponentModel.DataAnnotations.Resources.DataAnnotationsResources", typeof(RequiredAttribute).Assembly);

        /// <summary>
        /// Error message used when a [Required] member has no value.
        /// </summary>
        internal static string RequiredAttribute_ValidationError
        {
            get { return _resourceManager.GetString("RequiredAttribute_ValidationError", ResourceCulture); }
        }

        // NOTE(review): never assigned, so GetString always receives null
        // (current UI culture) — confirm this is intended.
        private static CultureInfo ResourceCulture { get; set; }
    }
}
| BclEx/AdamsyncEx | src/System.CoreEx/ComponentModel/DataAnnotations/DataAnnotationsResources.cs | C# | mit | 1,841 |
# Your application starts here.
require_relative 'config/application'

# Placeholder output pointing the developer at this file.
puts "Put your application code in #{File.expand_path(__FILE__)}"
| thedanpan/toilets_directory_nyc | ar-skeleton/app.rb | Ruby | mit | 136 |
/*************************************************************************/
/* editor_run.cpp */
/*************************************************************************/
/* This file is part of: */
/* GODOT ENGINE */
/* https://godotengine.org */
/*************************************************************************/
/* Copyright (c) 2007-2020 Juan Linietsky, Ariel Manzur. */
/* Copyright (c) 2014-2020 Godot Engine contributors (cf. AUTHORS.md). */
/* */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the */
/* "Software"), to deal in the Software without restriction, including */
/* without limitation the rights to use, copy, modify, merge, publish, */
/* distribute, sublicense, and/or sell copies of the Software, and to */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions: */
/* */
/* The above copyright notice and this permission notice shall be */
/* included in all copies or substantial portions of the Software. */
/* */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/*************************************************************************/
#include "editor_run.h"
#include "core/project_settings.h"
#include "editor_settings.h"
#include "servers/display_server.h"
// Returns the current run state (playing or stopped) of the child project.
EditorRun::Status EditorRun::get_status() const {
	return status;
}
// Returns the path of the scene currently being run, or "" if none.
String EditorRun::get_running_scene() const {
	return running_scene;
}
// Launches one or more instances of the project with remote debugging enabled.
// Editor settings determine the debugger endpoint, window placement, and extra
// command-line flags; p_custom_args are appended verbatim (space-separated).
// Returns OK on success, or the error from OS::execute otherwise.
Error EditorRun::run(const String &p_scene, const String &p_custom_args, const List<String> &p_breakpoints, const bool &p_skip_breakpoints) {
	List<String> args;

	String resource_path = ProjectSettings::get_singleton()->get_resource_path();
	String remote_host = EditorSettings::get_singleton()->get("network/debug/remote_host");
	int remote_port = (int)EditorSettings::get_singleton()->get("network/debug/remote_port");

	if (resource_path != "") {
		args.push_back("--path");
		args.push_back(resource_path.replace(" ", "%20"));
	}

	args.push_back("--remote-debug");
	args.push_back("tcp://" + remote_host + ":" + String::num(remote_port));

	args.push_back("--allow_focus_steal_pid");
	args.push_back(itos(OS::get_singleton()->get_process_id()));

	bool debug_collisions = EditorSettings::get_singleton()->get_project_metadata("debug_options", "run_debug_collisons", false);
	bool debug_navigation = EditorSettings::get_singleton()->get_project_metadata("debug_options", "run_debug_navigation", false);
	if (debug_collisions) {
		args.push_back("--debug-collisions");
	}

	if (debug_navigation) {
		args.push_back("--debug-navigation");
	}

	int screen = EditorSettings::get_singleton()->get("run/window_placement/screen");
	if (screen == 0) {
		// Same as editor
		screen = DisplayServer::get_singleton()->window_get_current_screen();
	} else if (screen == 1) {
		// Previous monitor (wrap to the other end if needed)
		screen = Math::wrapi(
				DisplayServer::get_singleton()->window_get_current_screen() - 1,
				0,
				DisplayServer::get_singleton()->get_screen_count());
	} else if (screen == 2) {
		// Next monitor (wrap to the other end if needed)
		screen = Math::wrapi(
				DisplayServer::get_singleton()->window_get_current_screen() + 1,
				0,
				DisplayServer::get_singleton()->get_screen_count());
	} else {
		// Fixed monitor ID
		// There are 3 special options, so decrement the option ID by 3 to get the monitor ID
		screen -= 3;
	}

	if (OS::get_singleton()->is_disable_crash_handler()) {
		args.push_back("--disable-crash-handler");
	}

	Rect2 screen_rect;
	screen_rect.position = DisplayServer::get_singleton()->screen_get_position(screen);
	screen_rect.size = DisplayServer::get_singleton()->screen_get_size(screen);

	Size2 desired_size;
	desired_size.x = ProjectSettings::get_singleton()->get("display/window/size/width");
	desired_size.y = ProjectSettings::get_singleton()->get("display/window/size/height");

	Size2 test_size;
	test_size.x = ProjectSettings::get_singleton()->get("display/window/size/test_width");
	test_size.y = ProjectSettings::get_singleton()->get("display/window/size/test_height");
	if (test_size.x > 0 && test_size.y > 0) {
		desired_size = test_size;
	}

	int window_placement = EditorSettings::get_singleton()->get("run/window_placement/rect");
	bool hidpi_proj = ProjectSettings::get_singleton()->get("display/window/dpi/allow_hidpi");
	// Must be a float: the scale can be fractional (e.g. 1.f / 2 == 0.5 when
	// the editor runs at lowDPI and the project at hiDPI). Declaring this as
	// `int` truncated that case to 0, causing a division by zero below.
	float display_scale = 1;
	if (DisplayServer::get_singleton()->has_feature(DisplayServer::FEATURE_HIDPI)) {
		if (OS::get_singleton()->is_hidpi_allowed()) {
			if (hidpi_proj) {
				display_scale = 1; // Both editor and project runs in hiDPI mode, do not scale.
			} else {
				display_scale = DisplayServer::get_singleton()->screen_get_max_scale(); // Editor is in hiDPI mode, project is not, scale down.
			}
		} else {
			if (hidpi_proj) {
				display_scale = (1.f / DisplayServer::get_singleton()->screen_get_max_scale()); // Editor is not in hiDPI mode, project is, scale up.
			} else {
				display_scale = 1; // Both editor and project runs in lowDPI mode, do not scale.
			}
		}
		screen_rect.position /= display_scale;
		screen_rect.size /= display_scale;
	}

	switch (window_placement) {
		case 0: { // top left
			args.push_back("--position");
			args.push_back(itos(screen_rect.position.x) + "," + itos(screen_rect.position.y));
		} break;
		case 1: { // centered
			Vector2 pos = (screen_rect.position) + ((screen_rect.size - desired_size) / 2).floor();
			args.push_back("--position");
			args.push_back(itos(pos.x) + "," + itos(pos.y));
		} break;
		case 2: { // custom pos
			Vector2 pos = EditorSettings::get_singleton()->get("run/window_placement/rect_custom_position");
			pos += screen_rect.position;
			args.push_back("--position");
			args.push_back(itos(pos.x) + "," + itos(pos.y));
		} break;
		case 3: { // force maximized
			Vector2 pos = screen_rect.position;
			args.push_back("--position");
			args.push_back(itos(pos.x) + "," + itos(pos.y));
			args.push_back("--maximized");
		} break;
		case 4: { // force fullscreen
			Vector2 pos = screen_rect.position;
			args.push_back("--position");
			args.push_back(itos(pos.x) + "," + itos(pos.y));
			args.push_back("--fullscreen");
		} break;
	}

	if (p_breakpoints.size()) {
		args.push_back("--breakpoints");
		String bpoints;
		for (const List<String>::Element *E = p_breakpoints.front(); E; E = E->next()) {
			bpoints += E->get().replace(" ", "%20");
			if (E->next()) {
				bpoints += ",";
			}
		}
		args.push_back(bpoints);
	}

	if (p_skip_breakpoints) {
		args.push_back("--skip-breakpoints");
	}

	if (p_scene != "") {
		args.push_back(p_scene);
	}

	if (p_custom_args != "") {
		Vector<String> cargs = p_custom_args.split(" ", false);
		for (int i = 0; i < cargs.size(); i++) {
			args.push_back(cargs[i].replace(" ", "%20"));
		}
	}

	String exec = OS::get_singleton()->get_executable_path();

	printf("Running: %s", exec.utf8().get_data());
	for (List<String>::Element *E = args.front(); E; E = E->next()) {
		printf(" %s", E->get().utf8().get_data());
	};
	printf("\n");

	int instances = EditorSettings::get_singleton()->get_project_metadata("debug_options", "run_debug_instances", 1);
	for (int i = 0; i < instances; i++) {
		OS::ProcessID pid = 0;
		Error err = OS::get_singleton()->execute(exec, args, false, &pid);
		ERR_FAIL_COND_V(err, err);
		pids.push_back(pid);
	}

	status = STATUS_PLAY;
	if (p_scene != "") {
		running_scene = p_scene;
	}

	return OK;
}
// Returns true if the given PID belongs to a process spawned by this runner.
bool EditorRun::has_child_process(OS::ProcessID p_pid) const {
	const List<OS::ProcessID>::Element *e = pids.front();
	while (e) {
		if (e->get() == p_pid) {
			return true;
		}
		e = e->next();
	}
	return false;
}
// Kills a single spawned process and forgets its PID; no-op for unknown PIDs.
void EditorRun::stop_child_process(OS::ProcessID p_pid) {
	if (!has_child_process(p_pid)) {
		return;
	}
	OS::get_singleton()->kill(p_pid);
	pids.erase(p_pid);
}
void EditorRun::stop() {
if (status != STATUS_STOP && pids.size() > 0) {
for (List<OS::ProcessID>::Element *E = pids.front(); E; E = E->next()) {
OS::get_singleton()->kill(E->get());
}
}
status = STATUS_STOP;
running_scene = "";
}
// Initial state: stopped, with no scene running.
EditorRun::EditorRun() {
	status = STATUS_STOP;
	running_scene = "";
}
| Paulloz/godot | editor/editor_run.cpp | C++ | mit | 9,100 |
/*******************************************************************************
Copyright (C) 2015 Dario Oliveri
See copyright notice in LICENSE.md
*******************************************************************************/
#pragma once
#include <list>
#include <typeindex>
#include <unordered_map>
#include "InfectorTypes.hpp"
namespace Infector {
namespace priv {

    class ConcreteContainer;

    /** Dependency Direct Acyclic Graph.

    This collect dependencies between concrete types only ( A bit pointless
    tracing dependencies between interfaces since interfaces has no ctor).
    A Dependency DAG lives in one context only (Context inheritance is resolved
    by context only by creating a SymbolTable)*/
    class DependencyDAG{
    public:

        DependencyDAG( DependencyDAG * parent);
        ~DependencyDAG();

        // Sets the guard node used by checkGuardBreaking while wiring.
        void setGuard( TypeInfoP g);

        //type resolution is container responsibility
        void dependOn( TypeInfoP wired, TypeInfoP abstractDep,
                       ConcreteContainer * container);

        // Removes a concrete type (and its edges) from the graph.
        void remove( TypeInfoP concrete);

        /** clean memory used by DAG.*/
        void clean();

        // Adjacency map: type_index -> list of related types.
        using EdgeMap = std::unordered_map< std::type_index,
                                            std::list< TypeInfoP> >;

        std::list<TypeInfoP> getDependencies( TypeInfoP concrete);

        // Overload taking an already-computed type_index.
        std::list<TypeInfoP> getDependencies( std::type_index & concrete);

    private:
        using EdgeMapPtr = std::shared_ptr<EdgeMap>;
        EdgeMapPtr dependencies = std::make_shared<EdgeMap>();
        EdgeMapPtr dependants = std::make_shared<EdgeMap>();
        TypeInfoP guard = nullptr;          // guard node for cycle checking
        DependencyDAG* parent = nullptr;    // DAG of the parent context, if any

        void removeDependant( TypeInfoP wired, TypeInfoP abstractDep);
        void addDependency( TypeInfoP wired, TypeInfoP abstractDep);
        void addDependant( TypeInfoP wired, TypeInfoP abstractDep);
        void checkGuardBreaking( TypeInfoP currentNode,
                                 ConcreteContainer * container,
                                 int HARD_RECURSION_LIMIT);
    };

} // namespace priv
} // namespace Infector
using Microsoft.CodeAnalysis;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Linq;
using System.Threading.Tasks;
namespace Csla.Analyzers.Tests
{
  /// <summary>
  /// Tests for <see cref="IsOperationMethodPublicAnalyzer"/>, which reports
  /// data portal operation methods that are declared public.
  /// </summary>
  [TestClass]
  public sealed class IsOperationMethodPublicAnalyzerTests
  {
    // The analyzer must expose exactly two diagnostics (class and interface
    // variants) with the expected metadata.
    [TestMethod]
    public void VerifySupportedDiagnostics()
    {
      var analyzer = new IsOperationMethodPublicAnalyzer();
      var diagnostics = analyzer.SupportedDiagnostics;
      Assert.AreEqual(2, diagnostics.Length);

      var diagnostic = diagnostics.Single(_ => _.Id == Constants.AnalyzerIdentifiers.IsOperationMethodPublic);
      Assert.AreEqual(IsOperationMethodPublicAnalyzerConstants.Title, diagnostic.Title.ToString(),
        nameof(DiagnosticDescriptor.Title));
      Assert.AreEqual(IsOperationMethodPublicAnalyzerConstants.Message, diagnostic.MessageFormat.ToString(),
        nameof(DiagnosticDescriptor.MessageFormat));
      Assert.AreEqual(Constants.Categories.Design, diagnostic.Category,
        nameof(DiagnosticDescriptor.Category));
      Assert.AreEqual(DiagnosticSeverity.Warning, diagnostic.DefaultSeverity,
        nameof(DiagnosticDescriptor.DefaultSeverity));
      Assert.AreEqual(HelpUrlBuilder.Build(Constants.AnalyzerIdentifiers.IsOperationMethodPublic, nameof(IsOperationMethodPublicAnalyzer)),
        diagnostic.HelpLinkUri,
        nameof(DiagnosticDescriptor.HelpLinkUri));

      var diagnosticForInterface = diagnostics.Single(_ => _.Id == Constants.AnalyzerIdentifiers.IsOperationMethodPublicForInterface);
      Assert.AreEqual(IsOperationMethodPublicAnalyzerConstants.Title, diagnosticForInterface.Title.ToString(),
        nameof(DiagnosticDescriptor.Title));
      Assert.AreEqual(IsOperationMethodPublicAnalyzerConstants.Message, diagnosticForInterface.MessageFormat.ToString(),
        nameof(DiagnosticDescriptor.MessageFormat));
      Assert.AreEqual(Constants.Categories.Design, diagnosticForInterface.Category,
        nameof(DiagnosticDescriptor.Category));
      Assert.AreEqual(DiagnosticSeverity.Warning, diagnosticForInterface.DefaultSeverity,
        nameof(DiagnosticDescriptor.DefaultSeverity));
      Assert.AreEqual(HelpUrlBuilder.Build(Constants.AnalyzerIdentifiers.IsOperationMethodPublicForInterface, nameof(IsOperationMethodPublicAnalyzer)),
        diagnosticForInterface.HelpLinkUri,
        nameof(DiagnosticDescriptor.HelpLinkUri));
    }

    // A public [Fetch] method on a type that is not a CSLA stereotype should
    // produce no diagnostics.
    [TestMethod]
    public async Task AnalyzeWhenTypeIsNotStereotype()
    {
      var code =
@"public class A
{
  [Fetch]
  public void Fetch() { }
}";
      await TestHelpers.RunAnalysisAsync<IsOperationMethodPublicAnalyzer>(
        code, Array.Empty<string>());
    }

    // A public method on a stereotype that is not a data portal operation
    // should produce no diagnostics.
    [TestMethod]
    public async Task AnalyzeWhenTypeIsStereotypeAndMethodIsNotADataPortalOperation()
    {
      var code =
@"using Csla;
using System;

[Serializable]
public class A : BusinessBase<A>
{
  public void AMethod() { }
}";
      await TestHelpers.RunAnalysisAsync<IsOperationMethodPublicAnalyzer>(
        code, Array.Empty<string>());
    }

    // A private operation method should produce no diagnostics.
    [TestMethod]
    public async Task AnalyzeWhenTypeIsStereotypeAndMethodIsADataPortalOperationThatIsNotPublic()
    {
      var code =
@"using Csla;
using System;

[Serializable]
public class A : BusinessBase<A>
{
  private void DataPortal_Fetch() { }
}";
      await TestHelpers.RunAnalysisAsync<IsOperationMethodPublicAnalyzer>(
        code, Array.Empty<string>());
    }

    // A public operation on an unsealed class triggers the diagnostic, with
    // the IsSealed property reported as false.
    [TestMethod]
    public async Task AnalyzeWhenTypeIsStereotypeAndMethodIsADataPortalOperationThatIsPublicAndClassIsNotSealed()
    {
      var code =
@"using Csla;
using System;

[Serializable]
public class A : BusinessBase<A>
{
  [Fetch]
  public void Fetch() { }
}";
      await TestHelpers.RunAnalysisAsync<IsOperationMethodPublicAnalyzer>(code,
        new[] { Constants.AnalyzerIdentifiers.IsOperationMethodPublic },
        diagnostics => Assert.AreEqual(false.ToString(), diagnostics[0].Properties[IsOperationMethodPublicAnalyzerConstants.IsSealed]));
    }

    // A public operation on a sealed class triggers the diagnostic, with the
    // IsSealed property reported as true.
    [TestMethod]
    public async Task AnalyzeWhenTypeIsStereotypeAndMethodIsADataPortalOperationThatIsPublicAndClassIsSealed()
    {
      var code =
@"using Csla;
using System;

[Serializable]
public sealed class A : BusinessBase<A>
{
  [Fetch]
  public void Fetch() { }
}";
      await TestHelpers.RunAnalysisAsync<IsOperationMethodPublicAnalyzer>(code,
        new[] { Constants.AnalyzerIdentifiers.IsOperationMethodPublic },
        diagnostics => Assert.AreEqual(true.ToString(), diagnostics[0].Properties[IsOperationMethodPublicAnalyzerConstants.IsSealed]));
    }

    // An operation declared on a business-object interface triggers the
    // interface-specific diagnostic.
    [TestMethod]
    public async Task AnalyzeWhenTypeIsStereotypeAndMethodIsADataPortalOperationThatIsPublicAndTypeIsInterface()
    {
      var code =
@"using Csla;
using Csla.Core;

public interface A
  : IBusinessObject
{
  [Fetch]
  void Fetch();
}";
      await TestHelpers.RunAnalysisAsync<IsOperationMethodPublicAnalyzer>(code,
        new[] { Constants.AnalyzerIdentifiers.IsOperationMethodPublicForInterface });
    }
  }
}
import json
import unittest2
from appengine_fixture_loader.loader import load_fixture
from google.appengine.ext import testbed
from google.appengine.ext import ndb
from helpers.event_simulator import EventSimulator
from helpers.event_team_status_helper import EventTeamStatusHelper
from models.event import Event
from models.event_details import EventDetails
from models.match import Match
class TestSimulated2016nytrEventTeamStatusHelper(unittest2.TestCase):
def setUp(self):
    # Stand up a fresh App Engine testbed with datastore, memcache, and
    # taskqueue stubs for each test.
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_datastore_v3_stub()
    self.testbed.init_memcache_stub()
    self.testbed.init_taskqueue_stub(root_path=".")

    ndb.get_context().clear_cache()  # Prevent data from leaking between tests
def tearDown(self):
    # Deactivate the testbed so stubs do not leak into other tests.
    self.testbed.deactivate()
def testSimulatedEvent(self):
es = EventSimulator()
event = Event.get_by_id('2016nytr')
es.step()
event = Event.get_by_id('2016nytr')
status = EventTeamStatusHelper.generate_team_at_event_status('frc359', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc359', status),
'Team 359 is <b>Rank 15/36</b> with a record of <b>0-0-0</b> in quals.')
for _ in xrange(5):
es.step()
event = Event.get_by_id('2016nytr')
status = EventTeamStatusHelper.generate_team_at_event_status('frc359', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc359', status),
'Team 359 is <b>Rank 6/36</b> with a record of <b>1-0-0</b> in quals.')
for _ in xrange(67):
es.step()
event = Event.get_by_id('2016nytr')
status = EventTeamStatusHelper.generate_team_at_event_status('frc359', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc359', status),
'Team 359 was <b>Rank 1/36</b> with a record of <b>11-1-0</b> in quals.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc5240', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5240', status),
'Team 5240 was <b>Rank 4/36</b> with a record of <b>9-3-0</b> in quals.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc229', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc229', status),
'Team 229 was <b>Rank 16/36</b> with a record of <b>6-6-0</b> in quals.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc1665', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc1665', status),
'Team 1665 was <b>Rank 15/36</b> with a record of <b>6-6-0</b> in quals.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc5964', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5964', status),
'Team 5964 was <b>Rank 21/36</b> with a record of <b>6-6-0</b> in quals.')
es.step() # Alliance selections added
event = Event.get_by_id('2016nytr')
status = EventTeamStatusHelper.generate_team_at_event_status('frc359', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc359', status),
'Team 359 was <b>Rank 1/36</b> with a record of <b>11-1-0</b> in quals and will be competing in the playoffs as the <b>Captain</b> of <b>Alliance 1</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc5240', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5240', status),
'Team 5240 was <b>Rank 4/36</b> with a record of <b>9-3-0</b> in quals and will be competing in the playoffs as the <b>1st Pick</b> of <b>Alliance 4</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc229', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc229', status),
'Team 229 was <b>Rank 16/36</b> with a record of <b>6-6-0</b> in quals and will be competing in the playoffs as the <b>2nd Pick</b> of <b>Alliance 2</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc1665', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc1665', status),
'Team 1665 was <b>Rank 15/36</b> with a record of <b>6-6-0</b> in quals.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc5964', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5964', status),
'Team 5964 was <b>Rank 21/36</b> with a record of <b>6-6-0</b> in quals.')
es.step() # QF schedule added
event = Event.get_by_id('2016nytr')
status = EventTeamStatusHelper.generate_team_at_event_status('frc359', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc359', status),
'Team 359 is <b>0-0-0</b> in the <b>Quarterfinals</b> as the <b>Captain</b> of <b>Alliance 1</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc5240', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5240', status),
'Team 5240 is <b>0-0-0</b> in the <b>Quarterfinals</b> as the <b>1st Pick</b> of <b>Alliance 4</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc229', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc229', status),
'Team 229 is <b>0-0-0</b> in the <b>Quarterfinals</b> as the <b>2nd Pick</b> of <b>Alliance 2</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc1665', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc1665', status),
'Team 1665 was <b>Rank 15/36</b> with a record of <b>6-6-0</b> in quals.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc5964', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5964', status),
'Team 5964 was <b>Rank 21/36</b> with a record of <b>6-6-0</b> in quals.')
es.step() # qf1m1
event = Event.get_by_id('2016nytr')
status = EventTeamStatusHelper.generate_team_at_event_status('frc359', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc359', status),
'Team 359 is <b>1-0-0</b> in the <b>Quarterfinals</b> as the <b>Captain</b> of <b>Alliance 1</b>.')
es.step() # qf2m1
es.step() # qf3m1
es.step() # qf4m1
es.step() # qf1m2
event = Event.get_by_id('2016nytr')
status = EventTeamStatusHelper.generate_team_at_event_status('frc359', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc359', status),
'Team 359 is <b>0-0-0</b> in the <b>Semifinals</b> as the <b>Captain</b> of <b>Alliance 1</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc5240', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5240', status),
'Team 5240 is <b>0-1-0</b> in the <b>Quarterfinals</b> as the <b>1st Pick</b> of <b>Alliance 4</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc229', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc229', status),
'Team 229 is <b>1-0-0</b> in the <b>Quarterfinals</b> as the <b>2nd Pick</b> of <b>Alliance 2</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc1665', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc1665', status),
'Team 1665 was <b>Rank 15/36</b> with a record of <b>6-6-0</b> in quals.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc5964', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5964', status),
'Team 5964 was <b>Rank 21/36</b> with a record of <b>6-6-0</b> in quals.')
es.step() # qf2m2
event = Event.get_by_id('2016nytr')
status = EventTeamStatusHelper.generate_team_at_event_status('frc5240', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5240', status),
'Team 5240 is <b>1-1-0</b> in the <b>Quarterfinals</b> as the <b>1st Pick</b> of <b>Alliance 4</b>.')
es.step() # qf3m2
es.step() # qf4m2
es.step() # qf2m3
event = Event.get_by_id('2016nytr')
status = EventTeamStatusHelper.generate_team_at_event_status('frc5240', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5240', status),
'Team 5240 is <b>0-0-0</b> in the <b>Semifinals</b> as the <b>1st Pick</b> of <b>Alliance 4</b>.')
es.step() # qf4m3
es.step() # sf1m1
event = Event.get_by_id('2016nytr')
status = EventTeamStatusHelper.generate_team_at_event_status('frc359', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc359', status),
'Team 359 is <b>1-0-0</b> in the <b>Semifinals</b> as the <b>Captain</b> of <b>Alliance 1</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc5240', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5240', status),
'Team 5240 is <b>0-1-0</b> in the <b>Semifinals</b> as the <b>1st Pick</b> of <b>Alliance 4</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc229', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc229', status),
'Team 229 is <b>0-0-0</b> in the <b>Semifinals</b> as the <b>2nd Pick</b> of <b>Alliance 2</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc1665', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc1665', status),
'Team 1665 was <b>Rank 15/36</b> with a record of <b>6-6-0</b> in quals.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc5964', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5964', status),
'Team 5964 was <b>Rank 21/36</b> with a record of <b>6-6-0</b> in quals.')
es.step() # sf2m1
es.step() # sf1m2
event = Event.get_by_id('2016nytr')
status = EventTeamStatusHelper.generate_team_at_event_status('frc359', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc359', status),
'Team 359 is <b>0-0-0</b> in the <b>Finals</b> as the <b>Captain</b> of <b>Alliance 1</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc5240', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5240', status),
'Team 5240 was <b>Rank 4/36</b> with a record of <b>9-3-0</b> in quals, competed in the playoffs as the <b>1st Pick</b> of <b>Alliance 4</b>, and was <b>eliminated in the Semifinals</b> with a playoff record of <b>2-3-0</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc229', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc229', status),
'Team 229 is <b>0-1-0</b> in the <b>Semifinals</b> as the <b>2nd Pick</b> of <b>Alliance 2</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc1665', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc1665', status),
'Team 1665 was <b>Rank 15/36</b> with a record of <b>6-6-0</b> in quals.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc5964', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5964', status),
'Team 5964 was <b>Rank 21/36</b> with a record of <b>6-6-0</b> in quals.')
es.step() # sf2m2
event = Event.get_by_id('2016nytr')
status = EventTeamStatusHelper.generate_team_at_event_status('frc359', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc359', status),
'Team 359 is <b>0-0-0</b> in the <b>Finals</b> as the <b>Captain</b> of <b>Alliance 1</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc5240', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5240', status),
'Team 5240 was <b>Rank 4/36</b> with a record of <b>9-3-0</b> in quals, competed in the playoffs as the <b>1st Pick</b> of <b>Alliance 4</b>, and was <b>eliminated in the Semifinals</b> with a playoff record of <b>2-3-0</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc229', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc229', status),
'Team 229 is <b>1-1-0</b> in the <b>Semifinals</b> as the <b>2nd Pick</b> of <b>Alliance 2</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc1665', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc1665', status),
'Team 1665 is <b>1-1-0</b> in the <b>Semifinals</b> as the <b>Backup</b> of <b>Alliance 2</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc5964', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5964', status),
'Team 5964 was <b>Rank 21/36</b> with a record of <b>6-6-0</b> in quals.')
es.step() # sf2m3
event = Event.get_by_id('2016nytr')
status = EventTeamStatusHelper.generate_team_at_event_status('frc359', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc359', status),
'Team 359 is <b>0-0-0</b> in the <b>Finals</b> as the <b>Captain</b> of <b>Alliance 1</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc5240', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5240', status),
'Team 5240 was <b>Rank 4/36</b> with a record of <b>9-3-0</b> in quals, competed in the playoffs as the <b>1st Pick</b> of <b>Alliance 4</b>, and was <b>eliminated in the Semifinals</b> with a playoff record of <b>2-3-0</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc229', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc229', status),
'Team 229 is <b>0-0-0</b> in the <b>Finals</b> as the <b>2nd Pick</b> of <b>Alliance 2</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc1665', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc1665', status),
'Team 1665 is <b>0-0-0</b> in the <b>Finals</b> as the <b>Backup</b> of <b>Alliance 2</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc5964', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5964', status),
'Team 5964 was <b>Rank 21/36</b> with a record of <b>6-6-0</b> in quals.')
es.step() # f1m1
event = Event.get_by_id('2016nytr')
status = EventTeamStatusHelper.generate_team_at_event_status('frc359', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc359', status),
'Team 359 is <b>1-0-0</b> in the <b>Finals</b> as the <b>Captain</b> of <b>Alliance 1</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc5240', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5240', status),
'Team 5240 was <b>Rank 4/36</b> with a record of <b>9-3-0</b> in quals, competed in the playoffs as the <b>1st Pick</b> of <b>Alliance 4</b>, and was <b>eliminated in the Semifinals</b> with a playoff record of <b>2-3-0</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc229', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc229', status),
'Team 229 is <b>0-1-0</b> in the <b>Finals</b> as the <b>2nd Pick</b> of <b>Alliance 2</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc1665', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc1665', status),
'Team 1665 is <b>0-1-0</b> in the <b>Finals</b> as the <b>Backup</b> of <b>Alliance 2</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc5964', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5964', status),
'Team 5964 was <b>Rank 21/36</b> with a record of <b>6-6-0</b> in quals.')
es.step() # f1m2
event = Event.get_by_id('2016nytr')
status = EventTeamStatusHelper.generate_team_at_event_status('frc359', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc359', status),
'Team 359 is <b>1-1-0</b> in the <b>Finals</b> as the <b>Captain</b> of <b>Alliance 1</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc5240', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5240', status),
'Team 5240 was <b>Rank 4/36</b> with a record of <b>9-3-0</b> in quals, competed in the playoffs as the <b>1st Pick</b> of <b>Alliance 4</b>, and was <b>eliminated in the Semifinals</b> with a playoff record of <b>2-3-0</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc229', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc229', status),
'Team 229 is <b>1-1-0</b> in the <b>Finals</b> as the <b>2nd Pick</b> of <b>Alliance 2</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc1665', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc1665', status),
'Team 1665 is <b>1-1-0</b> in the <b>Finals</b> as the <b>Backup</b> of <b>Alliance 2</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc5964', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5964', status),
'Team 5964 was <b>Rank 21/36</b> with a record of <b>6-6-0</b> in quals.')
es.step() # f1m3
event = Event.get_by_id('2016nytr')
status = EventTeamStatusHelper.generate_team_at_event_status('frc359', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc359', status),
'Team 359 was <b>Rank 1/36</b> with a record of <b>11-1-0</b> in quals, competed in the playoffs as the <b>Captain</b> of <b>Alliance 1</b>, and <b>won the event</b> with a playoff record of <b>6-1-0</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc5240', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5240', status),
'Team 5240 was <b>Rank 4/36</b> with a record of <b>9-3-0</b> in quals, competed in the playoffs as the <b>1st Pick</b> of <b>Alliance 4</b>, and was <b>eliminated in the Semifinals</b> with a playoff record of <b>2-3-0</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc229', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc229', status),
'Team 229 was <b>Rank 16/36</b> with a record of <b>6-6-0</b> in quals, competed in the playoffs as the <b>2nd Pick</b> of <b>Alliance 2</b>, and was <b>eliminated in the Finals</b> with a playoff record of <b>5-3-0</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc1665', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc1665', status),
'Team 1665 was <b>Rank 15/36</b> with a record of <b>6-6-0</b> in quals, competed in the playoffs as the <b>Backup</b> of <b>Alliance 2</b>, and was <b>eliminated in the Finals</b> with a playoff record of <b>5-3-0</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc5964', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5964', status),
'Team 5964 was <b>Rank 21/36</b> with a record of <b>6-6-0</b> in quals.')
class Test2016nytrEventTeamStatusHelper(unittest2.TestCase):
    """Checks team status dicts and strings for the completed 2016nytr event.

    The status_* class attributes are the full expected status dicts (alliance
    info, playoff result, and qual ranking) for each team, loaded-fixture data
    is compared against them with assertDictEqual, and the rendered string is
    checked separately.
    """

    # Event winner: captain of Alliance 1, rank 1 in quals.
    status_359 = {
        "alliance": {
            "backup": None,
            "name": "Alliance 1",
            "number": 1,
            "pick": 0
        },
        "playoff": {
            "current_level_record": {
                "losses": 1,
                "ties": 0,
                "wins": 2
            },
            "level": "f",
            "playoff_average": None,
            "record": {
                "losses": 1,
                "ties": 0,
                "wins": 6
            },
            "status": "won"
        },
        "qual": {
            "num_teams": 36,
            "ranking": {
                "dq": 0,
                "matches_played": 12,
                "qual_average": None,
                "rank": 1,
                "record": {
                    "losses": 1,
                    "ties": 0,
                    "wins": 11
                },
                "sort_orders": [
                    39.0,
                    310.0,
                    165.0,
                    448.0,
                    600.0
                ],
                "team_key": "frc359"
            },
            "sort_order_info": [
                {
                    "name": "Ranking Score",
                    "precision": 0
                },
                {
                    "name": "Auto",
                    "precision": 0
                },
                {
                    "name": "Scale/Challenge",
                    "precision": 0
                },
                {
                    "name": "Goals",
                    "precision": 0
                },
                {
                    "name": "Defense",
                    "precision": 0
                }
            ],
            "status": "completed"
        }
    }

    # First pick of Alliance 4, eliminated in the semifinals.
    status_5240 = {
        "alliance": {
            "backup": None,
            "name": "Alliance 4",
            "number": 4,
            "pick": 1
        },
        "playoff": {
            "current_level_record": {
                "losses": 2,
                "ties": 0,
                "wins": 0
            },
            "level": "sf",
            "playoff_average": None,
            "record": {
                "losses": 3,
                "ties": 0,
                "wins": 2
            },
            "status": "eliminated"
        },
        "qual": {
            "num_teams": 36,
            "ranking": {
                "dq": 0,
                "matches_played": 12,
                "qual_average": None,
                "rank": 6,
                "record": {
                    "losses": 3,
                    "ties": 0,
                    "wins": 9
                },
                "sort_orders": [
                    28.0,
                    260.0,
                    150.0,
                    191.0,
                    575.0
                ],
                "team_key": "frc5240"
            },
            "sort_order_info": [
                {
                    "name": "Ranking Score",
                    "precision": 0
                },
                {
                    "name": "Auto",
                    "precision": 0
                },
                {
                    "name": "Scale/Challenge",
                    "precision": 0
                },
                {
                    "name": "Goals",
                    "precision": 0
                },
                {
                    "name": "Defense",
                    "precision": 0
                }
            ],
            "status": "completed"
        }
    }

    # Second pick of Alliance 2; swapped out for backup frc1665 ("backup" maps
    # the incoming team to the outgoing one).
    status_229 = {
        "alliance": {
            "backup": {
                "in": "frc1665",
                "out": "frc229"
            },
            "name": "Alliance 2",
            "number": 2,
            "pick": 2
        },
        "playoff": {
            "current_level_record": {
                "losses": 2,
                "ties": 0,
                "wins": 1
            },
            "level": "f",
            "playoff_average": None,
            "record": {
                "losses": 3,
                "ties": 0,
                "wins": 5
            },
            "status": "eliminated"
        },
        "qual": {
            "num_teams": 36,
            "ranking": {
                "dq": 0,
                "matches_played": 12,
                "qual_average": None,
                "rank": 20,
                "record": {
                    "losses": 6,
                    "ties": 0,
                    "wins": 6
                },
                "sort_orders": [
                    20.0,
                    156.0,
                    130.0,
                    119.0,
                    525.0
                ],
                "team_key": "frc229"
            },
            "sort_order_info": [
                {
                    "name": "Ranking Score",
                    "precision": 0
                },
                {
                    "name": "Auto",
                    "precision": 0
                },
                {
                    "name": "Scale/Challenge",
                    "precision": 0
                },
                {
                    "name": "Goals",
                    "precision": 0
                },
                {
                    "name": "Defense",
                    "precision": 0
                }
            ],
            "status": "completed"
        }
    }

    # Backup team that came in for frc229; "pick": -1 marks a backup.
    status_1665 = {
        "alliance": {
            "backup": {
                "in": "frc1665",
                "out": "frc229"
            },
            "name": "Alliance 2",
            "number": 2,
            "pick": -1
        },
        "playoff": {
            "current_level_record": {
                "losses": 2,
                "ties": 0,
                "wins": 1
            },
            "level": "f",
            "playoff_average": None,
            "record": {
                "losses": 3,
                "ties": 0,
                "wins": 5
            },
            "status": "eliminated"
        },
        "qual": {
            "num_teams": 36,
            "ranking": {
                "dq": 0,
                "matches_played": 12,
                "qual_average": None,
                "rank": 18,
                "record": {
                    "losses": 6,
                    "ties": 0,
                    "wins": 6
                },
                "sort_orders": [
                    20.0,
                    192.0,
                    105.0,
                    146.0,
                    525.0
                ],
                "team_key": "frc1665"
            },
            "sort_order_info": [
                {
                    "name": "Ranking Score",
                    "precision": 0
                },
                {
                    "name": "Auto",
                    "precision": 0
                },
                {
                    "name": "Scale/Challenge",
                    "precision": 0
                },
                {
                    "name": "Goals",
                    "precision": 0
                },
                {
                    "name": "Defense",
                    "precision": 0
                }
            ],
            "status": "completed"
        }
    }

    # Team that was never picked for playoffs: alliance/playoff are None.
    status_5964 = {
        "alliance": None,
        "playoff": None,
        "qual": {
            "num_teams": 36,
            "ranking": {
                "dq": 0,
                "matches_played": 12,
                "qual_average": None,
                "rank": 23,
                "record": {
                    "losses": 6,
                    "ties": 0,
                    "wins": 6
                },
                "sort_orders": [
                    19.0,
                    218.0,
                    110.0,
                    159.0,
                    520.0
                ],
                "team_key": "frc5964"
            },
            "sort_order_info": [
                {
                    "name": "Ranking Score",
                    "precision": 0
                },
                {
                    "name": "Auto",
                    "precision": 0
                },
                {
                    "name": "Scale/Challenge",
                    "precision": 0
                },
                {
                    "name": "Goals",
                    "precision": 0
                },
                {
                    "name": "Defense",
                    "precision": 0
                }
            ],
            "status": "completed"
        }
    }

    # Team with no data at this event; unused by the visible tests — presumably
    # kept for a not-yet-written case. TODO confirm.
    status_1124 = {
        "qual": None,
        "playoff": None,
        "alliance": None
    }

    # Because I can't figure out how to get these to generate
    def event_key_adder(self, obj):
        # Attach the Event key to each fixture-loaded entity by hand.
        obj.event = ndb.Key(Event, '2016nytr')

    def setUp(self):
        self.testbed = testbed.Testbed()
        self.testbed.activate()
        self.testbed.init_datastore_v3_stub()
        self.testbed.init_memcache_stub()
        ndb.get_context().clear_cache()  # Prevent data from leaking between tests

        # Load the full 2016nytr event (EventDetails, Event, Matches).
        load_fixture('test_data/fixtures/2016nytr_event_team_status.json',
                      kind={'EventDetails': EventDetails, 'Event': Event, 'Match': Match},
                      post_processor=self.event_key_adder)

        self.event = Event.get_by_id('2016nytr')
        self.assertIsNotNone(self.event)

    def tearDown(self):
        self.testbed.deactivate()

    def testEventWinner(self):
        status = EventTeamStatusHelper.generate_team_at_event_status('frc359', self.event)
        self.assertDictEqual(status, self.status_359)
        self.assertEqual(
            EventTeamStatusHelper.generate_team_at_event_status_string('frc359', status),
            'Team 359 was <b>Rank 1/36</b> with a record of <b>11-1-0</b> in quals, competed in the playoffs as the <b>Captain</b> of <b>Alliance 1</b>, and <b>won the event</b> with a playoff record of <b>6-1-0</b>.')

    def testElimSemisAndFirstPick(self):
        status = EventTeamStatusHelper.generate_team_at_event_status('frc5240', self.event)
        self.assertDictEqual(status, self.status_5240)
        self.assertEqual(
            EventTeamStatusHelper.generate_team_at_event_status_string('frc5240', status),
            'Team 5240 was <b>Rank 6/36</b> with a record of <b>9-3-0</b> in quals, competed in the playoffs as the <b>1st Pick</b> of <b>Alliance 4</b>, and was <b>eliminated in the Semifinals</b> with a playoff record of <b>2-3-0</b>.')

    def testBackupOut(self):
        status = EventTeamStatusHelper.generate_team_at_event_status('frc229', self.event)
        self.assertDictEqual(status, self.status_229)
        self.assertEqual(
            EventTeamStatusHelper.generate_team_at_event_status_string('frc229', status),
            'Team 229 was <b>Rank 20/36</b> with a record of <b>6-6-0</b> in quals, competed in the playoffs as the <b>2nd Pick</b> of <b>Alliance 2</b>, and was <b>eliminated in the Finals</b> with a playoff record of <b>5-3-0</b>.')

    def testBackupIn(self):
        status = EventTeamStatusHelper.generate_team_at_event_status('frc1665', self.event)
        self.assertDictEqual(status, self.status_1665)
        self.assertEqual(
            EventTeamStatusHelper.generate_team_at_event_status_string('frc1665', status),
            'Team 1665 was <b>Rank 18/36</b> with a record of <b>6-6-0</b> in quals, competed in the playoffs as the <b>Backup</b> of <b>Alliance 2</b>, and was <b>eliminated in the Finals</b> with a playoff record of <b>5-3-0</b>.')

    def testTeamNotPicked(self):
        status = EventTeamStatusHelper.generate_team_at_event_status('frc5964', self.event)
        self.assertDictEqual(status, self.status_5964)
        self.assertEqual(
            EventTeamStatusHelper.generate_team_at_event_status_string('frc5964', status),
            'Team 5964 was <b>Rank 23/36</b> with a record of <b>6-6-0</b> in quals.')
class Test2016nytrEventTeamStatusHelperNoEventDetails(unittest2.TestCase):
    """Same 2016nytr event but with the EventDetails entity deleted.

    Without EventDetails there are no rankings or alliance selections, so the
    expected status dicts carry None for rank/sort_orders/alliance and the
    rendered strings fall back to record-only phrasing ("had a record of ...").
    """

    # Event winner; rank/dq/sort data unavailable without EventDetails.
    status_359 = {
        "alliance": None,
        "playoff": {
            "current_level_record": {
                "losses": 1,
                "ties": 0,
                "wins": 2
            },
            "level": "f",
            "playoff_average": None,
            "record": {
                "losses": 1,
                "ties": 0,
                "wins": 6
            },
            "status": "won"
        },
        "qual": {
            "num_teams": 36,
            "ranking": {
                "dq": None,
                "matches_played": 12,
                "qual_average": None,
                "rank": None,
                "record": {
                    "losses": 1,
                    "ties": 0,
                    "wins": 11
                },
                "sort_orders": None,
                "team_key": "frc359",
            },
            "sort_order_info": None,
            "status": "completed"
        }
    }

    # Eliminated in the semifinals.
    status_5240 = {
        "alliance": None,
        "playoff": {
            "current_level_record": {
                "losses": 2,
                "ties": 0,
                "wins": 0
            },
            "level": "sf",
            "playoff_average": None,
            "record": {
                "losses": 3,
                "ties": 0,
                "wins": 2
            },
            "status": "eliminated"
        },
        "qual": {
            "num_teams": 36,
            "ranking": {
                "dq": None,
                "matches_played": 12,
                "qual_average": None,
                "rank": None,
                "record": {
                    "losses": 3,
                    "ties": 0,
                    "wins": 9
                },
                "sort_orders": None,
                "team_key": "frc5240",
            },
            "sort_order_info": None,
            "status": "completed"
        }
    }

    # Eliminated in the finals (backup-out team).
    status_229 = {
        "alliance": None,
        "playoff": {
            "current_level_record": {
                "losses": 2,
                "ties": 0,
                "wins": 1
            },
            "level": "f",
            "playoff_average": None,
            "record": {
                "losses": 3,
                "ties": 0,
                "wins": 5
            },
            "status": "eliminated"
        },
        "qual": {
            "num_teams": 36,
            "ranking": {
                "dq": None,
                "matches_played": 12,
                "qual_average": None,
                "rank": None,
                "record": {
                    "losses": 6,
                    "ties": 0,
                    "wins": 6
                },
                "sort_orders": None,
                "team_key": "frc229",
            },
            "sort_order_info": None,
            "status": "completed"
        }
    }

    # Eliminated in the finals (backup-in team).
    status_1665 = {
        "alliance": None,
        "playoff": {
            "current_level_record": {
                "losses": 2,
                "ties": 0,
                "wins": 1
            },
            "level": "f",
            "playoff_average": None,
            "record": {
                "losses": 3,
                "ties": 0,
                "wins": 5
            },
            "status": "eliminated"
        },
        "qual": {
            "num_teams": 36,
            "ranking": {
                "dq": None,
                "matches_played": 12,
                "qual_average": None,
                "rank": None,
                "record": {
                    "losses": 6,
                    "ties": 0,
                    "wins": 6
                },
                "sort_orders": None,
                "team_key": "frc1665",
            },
            "sort_order_info": None,
            "status": "completed"
        }
    }

    # Never picked for playoffs.
    status_5964 = {
        "alliance": None,
        "playoff": None,
        "qual": {
            "num_teams": 36,
            "ranking": {
                "dq": None,
                "matches_played": 12,
                "qual_average": None,
                "rank": None,
                "record": {
                    "losses": 6,
                    "ties": 0,
                    "wins": 6
                },
                "sort_orders": None,
                "team_key": "frc5964",
            },
            "sort_order_info": None,
            "status": "completed"
        }
    }

    # Team with no data; unused by the visible tests — presumably kept for a
    # not-yet-written case. TODO confirm.
    status_1124 = {
        "qual": None,
        "playoff": None,
        "alliance": None
    }

    # Because I can't figure out how to get these to generate
    def event_key_adder(self, obj):
        # Attach the Event key to each fixture-loaded entity by hand.
        obj.event = ndb.Key(Event, '2016nytr')

    def setUp(self):
        self.testbed = testbed.Testbed()
        self.testbed.activate()
        self.testbed.init_datastore_v3_stub()
        self.testbed.init_memcache_stub()
        ndb.get_context().clear_cache()  # Prevent data from leaking between tests

        load_fixture('test_data/fixtures/2016nytr_event_team_status.json',
                      kind={'EventDetails': EventDetails, 'Event': Event, 'Match': Match},
                      post_processor=self.event_key_adder)

        self.event = Event.get_by_id('2016nytr')
        EventDetails.get_by_id('2016nytr').key.delete()  # Remove EventDetails
        self.assertIsNotNone(self.event)

    def tearDown(self):
        self.testbed.deactivate()

    def testEventWinner(self):
        status = EventTeamStatusHelper.generate_team_at_event_status('frc359', self.event)
        self.assertDictEqual(status, self.status_359)
        self.assertEqual(
            EventTeamStatusHelper.generate_team_at_event_status_string('frc359', status),
            'Team 359 had a record of <b>11-1-0</b> in quals and <b>won the event</b> with a playoff record of <b>6-1-0</b>.')

    def testElimSemisAndFirstPick(self):
        status = EventTeamStatusHelper.generate_team_at_event_status('frc5240', self.event)
        self.assertDictEqual(status, self.status_5240)
        self.assertEqual(
            EventTeamStatusHelper.generate_team_at_event_status_string('frc5240', status),
            'Team 5240 had a record of <b>9-3-0</b> in quals and was <b>eliminated in the Semifinals</b> with a playoff record of <b>2-3-0</b>.')

    def testBackupOut(self):
        status = EventTeamStatusHelper.generate_team_at_event_status('frc229', self.event)
        self.assertDictEqual(status, self.status_229)
        self.assertEqual(
            EventTeamStatusHelper.generate_team_at_event_status_string('frc229', status),
            'Team 229 had a record of <b>6-6-0</b> in quals and was <b>eliminated in the Finals</b> with a playoff record of <b>5-3-0</b>.')

    def testBackupIn(self):
        status = EventTeamStatusHelper.generate_team_at_event_status('frc1665', self.event)
        self.assertDictEqual(status, self.status_1665)
        self.assertEqual(
            EventTeamStatusHelper.generate_team_at_event_status_string('frc1665', status),
            'Team 1665 had a record of <b>6-6-0</b> in quals and was <b>eliminated in the Finals</b> with a playoff record of <b>5-3-0</b>.')

    def testTeamNotPicked(self):
        status = EventTeamStatusHelper.generate_team_at_event_status('frc5964', self.event)
        self.assertDictEqual(status, self.status_5964)
        self.assertEqual(
            EventTeamStatusHelper.generate_team_at_event_status_string('frc5964', status),
            'Team 5964 had a record of <b>6-6-0</b> in quals.')
class Test2016casjEventTeamStatusHelperNoEventDetails(unittest2.TestCase):
    """Status generation for 2016casj without EventDetails.

    frc254 played 8 qual matches at this event; the expected dict below has
    None for rank/dq/sort data (no EventDetails) and the rendered string uses
    the record-only fallback phrasing.
    """

    # Expected status for frc254: undefeated in quals, won the event.
    status_254 = {
        "alliance": None,
        "playoff": {
            "current_level_record": {
                "losses": 0,
                "ties": 0,
                "wins": 2
            },
            "level": "f",
            "playoff_average": None,
            "record": {
                "losses": 0,
                "ties": 0,
                "wins": 6
            },
            "status": "won"
        },
        "qual": {
            "num_teams": 64,
            "ranking": {
                "dq": None,
                "matches_played": 8,
                "qual_average": None,
                "rank": None,
                "record": {
                    "losses": 0,
                    "ties": 0,
                    "wins": 8
                },
                "sort_orders": None,
                "team_key": "frc254",
            },
            "sort_order_info": None,
            "status": "completed"
        }
    }

    def event_key_adder(self, obj):
        # The fixture loader can't populate ndb key references itself, so
        # attach the event key to every loaded entity by hand.
        obj.event = ndb.Key(Event, '2016casj')

    def setUp(self):
        # Stand up a fresh GAE testbed and load the 2016casj fixture.
        tb = testbed.Testbed()
        tb.activate()
        tb.init_datastore_v3_stub()
        tb.init_memcache_stub()
        self.testbed = tb
        ndb.get_context().clear_cache()  # keep state from leaking between tests

        fixture_kinds = {'EventDetails': EventDetails, 'Event': Event, 'Match': Match}
        load_fixture('test_data/fixtures/2016casj.json',
                      kind=fixture_kinds,
                      post_processor=self.event_key_adder)

        self.event = Event.get_by_id('2016casj')
        self.assertIsNotNone(self.event)

    def tearDown(self):
        self.testbed.deactivate()

    def testEventSurrogate(self):
        team_status = EventTeamStatusHelper.generate_team_at_event_status('frc254', self.event)
        self.assertDictEqual(team_status, self.status_254)
        self.assertEqual(
            EventTeamStatusHelper.generate_team_at_event_status_string('frc254', team_status),
            'Team 254 had a record of <b>8-0-0</b> in quals and <b>won the event</b> with a playoff record of <b>6-0-0</b>.')
class Test2015casjEventTeamStatusHelper(unittest2.TestCase):
    """Status generation for 2015casj with full EventDetails.

    2015 used average-score ranking (no win/loss records), so the expected
    statuses carry qual/playoff averages and sort_order_info while every
    "record" field is None.
    """
    # Expected status for frc254: rank 1, alliance 1 captain, event winner.
    status_254 = {
        "alliance": {
            "backup": None,
            "name": "Alliance 1",
            "number": 1,
            "pick": 0
        },
        "playoff": {
            "current_level_record": {
                "losses": 0,
                "ties": 0,
                "wins": 2
            },
            "level": "f",
            "playoff_average": 224.14285714285714,
            "record": None,
            "status": "won"
        },
        "qual": {
            "num_teams": 57,
            "ranking": {
                "dq": 0,
                "matches_played": 10,
                "qual_average": 200.4,
                "rank": 1,
                "record": None,
                "sort_orders": [
                    200.4,
                    280.0,
                    200.0,
                    836.0,
                    522.0,
                    166.0
                ],
                "team_key": "frc254"
            },
            "sort_order_info": [
                {
                    "name": "Qual Avg.",
                    "precision": 1
                },
                {
                    "name": "Coopertition",
                    "precision": 0
                },
                {
                    "name": "Auto",
                    "precision": 0
                },
                {
                    "name": "Container",
                    "precision": 0
                },
                {
                    "name": "Tote",
                    "precision": 0
                },
                {
                    "name": "Litter",
                    "precision": 0
                }
            ],
            "status": "completed"
        }
    }
    # Expected status for frc846: rank 8, alliance 3 first pick, out in semis.
    status_846 = {
        "alliance": {
            "backup": None,
            "name": "Alliance 3",
            "number": 3,
            "pick": 1
        },
        "playoff": {
            "current_level_record": None,
            "level": "sf",
            "playoff_average": 133.59999999999999,
            "record": None,
            "status": "eliminated"
        },
        "qual": {
            "num_teams": 57,
            "ranking": {
                "dq": 0,
                "matches_played": 10,
                "qual_average": 97.0,
                "rank": 8,
                "record": None,
                "sort_orders": [
                    97.0,
                    200.0,
                    20.0,
                    372.0,
                    294.0,
                    108.0
                ],
                "team_key": "frc846"
            },
            "sort_order_info": [
                {
                    "name": "Qual Avg.",
                    "precision": 1
                },
                {
                    "name": "Coopertition",
                    "precision": 0
                },
                {
                    "name": "Auto",
                    "precision": 0
                },
                {
                    "name": "Container",
                    "precision": 0
                },
                {
                    "name": "Tote",
                    "precision": 0
                },
                {
                    "name": "Litter",
                    "precision": 0
                }
            ],
            "status": "completed"
        }
    }
    # Expected status for frc8: rank 53, never picked, so no playoff/alliance.
    status_8 = {
        "alliance": None,
        "playoff": None,
        "qual": {
            "num_teams": 57,
            "ranking": {
                "dq": 0,
                "matches_played": 10,
                "qual_average": 42.6,
                "rank": 53,
                "record": None,
                "sort_orders": [
                    42.6,
                    120.0,
                    0.0,
                    84.0,
                    150.0,
                    72.0
                ],
                "team_key": "frc8"
            },
            "sort_order_info": [
                {
                    "name": "Qual Avg.",
                    "precision": 1
                },
                {
                    "name": "Coopertition",
                    "precision": 0
                },
                {
                    "name": "Auto",
                    "precision": 0
                },
                {
                    "name": "Container",
                    "precision": 0
                },
                {
                    "name": "Tote",
                    "precision": 0
                },
                {
                    "name": "Litter",
                    "precision": 0
                }
            ],
            "status": "completed"
        }
    }
    # Expected status for a team with no data at this event.
    status_1124 = {
        "qual": None,
        "playoff": None,
        "alliance": None
    }
    # Because I can't figure out how to get these to generate
    def event_key_adder(self, obj):
        # Fixture post-processor: attach the event key to each loaded entity.
        obj.event = ndb.Key(Event, '2015casj')
    def setUp(self):
        self.testbed = testbed.Testbed()
        self.testbed.activate()
        self.testbed.init_datastore_v3_stub()
        self.testbed.init_memcache_stub()
        ndb.get_context().clear_cache() # Prevent data from leaking between tests
        load_fixture('test_data/fixtures/2015casj.json',
                     kind={'EventDetails': EventDetails, 'Event': Event, 'Match': Match},
                     post_processor=self.event_key_adder)
        self.event = Event.get_by_id('2015casj')
        self.assertIsNotNone(self.event)
    def tearDown(self):
        self.testbed.deactivate()
    def testEventWinner(self):
        status = EventTeamStatusHelper.generate_team_at_event_status('frc254', self.event)
        self.assertDictEqual(status, self.status_254)
        self.assertEqual(
            EventTeamStatusHelper.generate_team_at_event_status_string('frc254', status),
            'Team 254 was <b>Rank 1/57</b> with an average score of <b>200.4</b> in quals, competed in the playoffs as the <b>Captain</b> of <b>Alliance 1</b>, and <b>won the event</b> with a playoff average of <b>224.1</b>.')
    def testElimSemisAndFirstPick(self):
        status = EventTeamStatusHelper.generate_team_at_event_status('frc846', self.event)
        self.assertDictEqual(status, self.status_846)
        self.assertEqual(
            EventTeamStatusHelper.generate_team_at_event_status_string('frc846', status),
            'Team 846 was <b>Rank 8/57</b> with an average score of <b>97.0</b> in quals, competed in the playoffs as the <b>1st Pick</b> of <b>Alliance 3</b>, and was <b>eliminated in the Semifinals</b> with a playoff average of <b>133.6</b>.')
    def testTeamNotPicked(self):
        status = EventTeamStatusHelper.generate_team_at_event_status('frc8', self.event)
        self.assertDictEqual(status, self.status_8)
        self.assertEqual(
            EventTeamStatusHelper.generate_team_at_event_status_string('frc8', status),
            'Team 8 was <b>Rank 53/57</b> with an average score of <b>42.6</b> in quals.')
class Test2015casjEventTeamStatusHelperNoEventDetails(unittest2.TestCase):
    """Status generation for 2015casj after EventDetails has been deleted.

    setUp removes the EventDetails entity, so rankings/alliances are gone and
    the expected statuses only contain what can be derived from matches
    (averages); rank, dq, sort_orders and sort_order_info are all None.
    """
    status_254 = {
        "alliance": None,
        "playoff": {
            "current_level_record": {
                "losses": 0,
                "ties": 0,
                "wins": 2
            },
            "level": "f",
            "playoff_average": 224.14285714285714,
            "record": None,
            "status": "won"
        },
        "qual": {
            "num_teams": 57,
            "ranking": {
                "dq": None,
                "matches_played": 10,
                "qual_average": 200.4,
                "rank": None,
                "record": None,
                "sort_orders": None,
                "team_key": "frc254",
            },
            "sort_order_info": None,
            "status": "completed"
        }
    }
    status_846 = {
        "alliance": None,
        "playoff": {
            "current_level_record": None,
            "level": "sf",
            "playoff_average": 133.59999999999999,
            "record": None,
            "status": "eliminated"
        },
        "qual": {
            "num_teams": 57,
            "ranking": {
                "dq": None,
                "matches_played": 10,
                "qual_average": 97.0,
                "rank": None,
                "record": None,
                "sort_orders": None,
                "team_key": "frc846",
            },
            "sort_order_info": None,
            "status": "completed"
        }
    }
    status_8 = {
        "alliance": None,
        "playoff": None,
        "qual": {
            "num_teams": 57,
            "ranking": {
                "dq": None,
                "matches_played": 10,
                "qual_average": 42.6,
                "rank": None,
                "record": None,
                "sort_orders": None,
                "team_key": "frc8",
            },
            "sort_order_info": None,
            "status": "completed"
        }
    }
    status_1124 = {
        "qual": None,
        "playoff": None,
        "alliance": None
    }
    # Because I can't figure out how to get these to generate
    def event_key_adder(self, obj):
        # Fixture post-processor: attach the event key to each loaded entity.
        obj.event = ndb.Key(Event, '2015casj')
    def setUp(self):
        self.testbed = testbed.Testbed()
        self.testbed.activate()
        self.testbed.init_datastore_v3_stub()
        self.testbed.init_memcache_stub()
        ndb.get_context().clear_cache() # Prevent data from leaking between tests
        load_fixture('test_data/fixtures/2015casj.json',
                     kind={'EventDetails': EventDetails, 'Event': Event, 'Match': Match},
                     post_processor=self.event_key_adder)
        self.event = Event.get_by_id('2015casj')
        EventDetails.get_by_id('2015casj').key.delete() # Remove EventDetails
        self.assertIsNotNone(self.event)
    def tearDown(self):
        self.testbed.deactivate()
    def testEventWinner(self):
        # No rank available, so the string opens with the average score only.
        status = EventTeamStatusHelper.generate_team_at_event_status('frc254', self.event)
        self.assertDictEqual(status, self.status_254)
        self.assertEqual(
            EventTeamStatusHelper.generate_team_at_event_status_string('frc254', status),
            'Team 254 had an average score of <b>200.4</b> in quals and <b>won the event</b> with a playoff average of <b>224.1</b>.')
    def testElimSemisAndFirstPick(self):
        status = EventTeamStatusHelper.generate_team_at_event_status('frc846', self.event)
        self.assertDictEqual(status, self.status_846)
        self.assertEqual(
            EventTeamStatusHelper.generate_team_at_event_status_string('frc846', status),
            'Team 846 had an average score of <b>97.0</b> in quals and was <b>eliminated in the Semifinals</b> with a playoff average of <b>133.6</b>.')
    def testTeamNotPicked(self):
        status = EventTeamStatusHelper.generate_team_at_event_status('frc8', self.event)
        self.assertDictEqual(status, self.status_8)
        self.assertEqual(
            EventTeamStatusHelper.generate_team_at_event_status_string('frc8', status),
            'Team 8 had an average score of <b>42.6</b> in quals.')
| verycumbersome/the-blue-alliance | tests/test_event_team_status_helper.py | Python | mit | 55,040 |
require('can-view-import/can-view-import_test');
| tracer99/canjs | view/import/import_test.js | JavaScript | mit | 49 |
/*
* Copyright (c) 2006-2015 Rogério Liesenfeld
* This file is subject to the terms of the MIT license (see LICENSE.txt).
*/
package mockit.internal.expectations.argumentMatching;
import java.lang.reflect.Array;
import javax.annotation.*;
/**
 * Argument matcher which accepts an invocation argument when it is equal to the recorded value,
 * using {@code equals(Object)} semantics extended with element-wise (recursive) array comparison.
 */
public class EqualityMatcher implements ArgumentMatcher<EqualityMatcher>
{
   @Nullable final Object object;

   EqualityMatcher(@Nullable Object equalArg) { object = equalArg; }

   @Override
   public boolean same(@Nonnull EqualityMatcher other) { return object == other.object; }

   @Override
   public boolean matches(@Nullable Object argValue) { return areEqual(argValue, object); }

   @Override
   public void writeMismatchPhrase(@Nonnull ArgumentMismatch argumentMismatch)
   {
      argumentMismatch.appendFormatted(object);
   }

   /**
    * Null-safe equality check: two nulls (or the same reference) are equal,
    * a single null is not equal to anything else.
    */
   public static boolean areEqual(@Nullable Object o1, @Nullable Object o2)
   {
      if (o1 == o2) {
         return true;
      }

      if (o1 == null || o2 == null) {
         return false;
      }

      return areEqualWhenNonNull(o1, o2);
   }

   /**
    * Equality for two non-null values; arrays are compared element by element,
    * everything else through {@code equals(Object)}.
    */
   public static boolean areEqualWhenNonNull(@Nonnull Object o1, @Nonnull Object o2)
   {
      if (!isArray(o1)) {
         return o1.equals(o2);
      }

      return isArray(o2) && areArraysEqual(o1, o2);
   }

   private static boolean isArray(@Nonnull Object o) { return o.getClass().isArray(); }

   // Recursive comparison via java.lang.reflect.Array, so it works for
   // primitive arrays, object arrays, and nested arrays alike.
   private static boolean areArraysEqual(@Nonnull Object array1, @Nonnull Object array2)
   {
      int n = Array.getLength(array1);

      if (Array.getLength(array2) != n) {
         return false;
      }

      for (int index = 0; index < n; index++) {
         if (!areEqual(Array.get(array1, index), Array.get(array2, index))) {
            return false;
         }
      }

      return true;
   }
}
| beluchin/jmockit1 | main/src/mockit/internal/expectations/argumentMatching/EqualityMatcher.java | Java | mit | 1,866 |
<?php
namespace YOOtheme\Widgetkit\Framework\Database;
/**
 * Minimal database abstraction: parametrized queries, row fetching in several
 * shapes (assoc/numeric/object), and CRUD helpers keyed by an identifier array.
 */
interface DatabaseInterface
{
    /**
     * Fetches all rows of the result as an associative array.
     *
     * @param string $statement SQL with placeholders bound from $params
     * @param array $params
     * @return array
     */
    public function fetchAll($statement, array $params = array());
    /**
     * Fetches the first row of the result as an associative array.
     *
     * @param string $statement
     * @param array $params
     * @return array
     */
    public function fetchAssoc($statement, array $params = array());
    /**
     * Fetches the first row of the result as a numerically indexed array.
     *
     * @param string $statement
     * @param array $params
     * @return array
     */
    public function fetchArray($statement, array $params = array());
    /**
     * Prepares and executes an SQL query and returns the first row of the result as an object.
     *
     * @param string $statement
     * @param array $params
     * @param string $class fully-qualified class to hydrate (defaults to stdClass)
     * @param array $args constructor arguments for $class
     * @return mixed
     */
    public function fetchObject($statement, array $params = array(), $class = 'stdClass', $args = array());
    /**
     * Prepares and executes an SQL query and returns the result as an array of objects.
     *
     * @param string $statement
     * @param array $params
     * @param string $class fully-qualified class to hydrate (defaults to stdClass)
     * @param array $args constructor arguments for $class
     * @return array
     */
    public function fetchAllObjects($statement, array $params = array(), $class = 'stdClass', $args = array());
    /**
     * Executes an, optionally parametrized, SQL query.
     *
     * @param string $query
     * @param array $params
     *
     * @return int|false affected-row count, or false on failure
     */
    public function executeQuery($query, array $params = array());
    /**
     * Inserts a table row with specified data.
     *
     * @param string $table
     * @param array $data column => value map
     * @return int
     */
    public function insert($table, array $data);
    /**
     * Updates a table row with specified data.
     *
     * @param string $table
     * @param array $data column => value map of the new values
     * @param array $identifier column => value map selecting the row(s) to update
     * @return int
     */
    public function update($table, array $data, array $identifier);
    /**
     * Deletes a table row.
     *
     * @param string $table
     * @param array $identifier column => value map selecting the row(s) to delete
     * @return int
     */
    public function delete($table, array $identifier);
    /**
     * Escapes a string for usage in an SQL statement.
     *
     * @param string $text
     * @return string
     */
    public function escape($text);
    /**
     * Retrieves the last inserted id.
     *
     * @return int
     */
    public function lastInsertId();
}
| yaelduckwen/libriastore | joomla/administrator/components/com_widgetkit/src/Framework/src/Database/DatabaseInterface.php | PHP | mit | 2,712 |
import COMMAND from '../../../session/command';
import FileListWrapper from './file-list-wrapper';
import nativeMethods from '../native-methods';
import transport from '../../transport';
import settings from '../../settings';
import * as Browser from '../../utils/browser';
import * as HiddenInfo from './hidden-info';
import SHADOW_UI_CLASSNAME from '../../../shadow-ui/class-name';
import Promise from 'pinkie';
// NOTE: https://html.spec.whatwg.org/multipage/forms.html#fakepath-srsly.
const FAKE_PATH_STRING = 'C:\\fakepath\\';

const UPLOAD_IFRAME_FOR_IE9_ID = 'uploadIframeForIE9' + SHADOW_UI_CLASSNAME.postfix;

// Manages the state of <input type="file"> elements: remembers the selected
// files per input, produces the fake "C:\fakepath\..." display value, and
// exchanges file payloads with the test server via service messages.
export default class UploadInfoManager {
    constructor (shadowUI) {
        this.shadowUI   = shadowUI;
        this.uploadInfo = [];
    }

    // Collects the base64 payload of each file for the 'uploadFiles' service message.
    static _getFileListData (fileList) {
        var data = [];

        for (var i = 0; i < fileList.length; i++)
            data.push(fileList[i].base64);

        return data;
    }

    // Lazily creates the hidden iframe used as a form target to read file
    // content in IE9. NOTE: although declared static, it reads this.shadowUI,
    // so it must be invoked with an UploadInfoManager instance bound as `this`
    // (see _loadFileListDataForIE9 below).
    static _getUploadIframeForIE9 () {
        var uploadIframe = nativeMethods.querySelector.call(document, '#' + UPLOAD_IFRAME_FOR_IE9_ID);

        if (!uploadIframe) {
            uploadIframe = nativeMethods.createElement.call(document, 'iframe');

            nativeMethods.setAttribute.call(uploadIframe, 'id', UPLOAD_IFRAME_FOR_IE9_ID);
            nativeMethods.setAttribute.call(uploadIframe, 'name', UPLOAD_IFRAME_FOR_IE9_ID);
            uploadIframe.style.display = 'none';

            nativeMethods.appendChild.call(this.shadowUI.getRoot(), uploadIframe);
        }

        return uploadIframe;
    }

    // IE9 has no FileReader; the file is posted to the server-side shim through
    // a hidden iframe and read back from the iframe body.
    _loadFileListDataForIE9 (input) {
        // FIX: the Promise must be constructed with `new` - pinkie's Promise
        // throws when invoked as a plain function (cf. loadFileListData below,
        // which already uses `new Promise`).
        return new Promise(resolve => {
            var form = input.form;

            if (form && input.value) {
                var sourceTarget       = form.target;
                var sourceActionString = form.action;
                var sourceMethod       = form.method;
                // FIX: _getUploadIframeForIE9 dereferences this.shadowUI, so the
                // instance must be bound explicitly instead of calling the
                // method on the class (where this.shadowUI is undefined).
                var uploadIframe       = UploadInfoManager._getUploadIframeForIE9.call(this);

                var loadHandler = () => {
                    var fileListWrapper = new FileListWrapper([JSON.parse(uploadIframe.contentWindow.document.body.innerHTML)]);

                    uploadIframe.removeEventListener('load', loadHandler);
                    resolve(fileListWrapper);
                };

                uploadIframe.addEventListener('load', loadHandler);

                // Temporarily retarget the form at the shim URL, submit it via the
                // native method, then restore the original form attributes.
                form.action = settings.get().ie9FileReaderShimUrl + '?input-name=' + input.name + '&filename=' +
                              input.value;
                form.target = UPLOAD_IFRAME_FOR_IE9_ID;
                form.method = 'post';

                nativeMethods.formSubmit.call(form);

                form.action = sourceActionString;
                form.target = sourceTarget;
                form.method = sourceMethod;
            }
            else
                resolve(new FileListWrapper([]));
        });
    }

    // Builds the browser-specific value shown in input.value for the given
    // file name(s): WebKit fakes a single path, IE9/10 a comma-separated list,
    // other browsers just the bare file name.
    static formatValue (fileNames) {
        var value = '';

        fileNames = typeof fileNames === 'string' ? [fileNames] : fileNames;

        if (fileNames && fileNames.length) {
            if (Browser.isWebKit)
                value = FAKE_PATH_STRING + fileNames[0].split('/').pop();
            else if (Browser.isIE9 || Browser.isIE10) {
                var filePaths = [];

                for (var i = 0; i < fileNames.length; i++)
                    filePaths.push(FAKE_PATH_STRING + fileNames[i].split('/').pop());

                value = filePaths.join(', ');
            }
            else
                return fileNames[0].split('/').pop();
        }

        return value;
    }

    // Extracts file names either from a FileList or from a "C:\...\name" value string.
    static getFileNames (fileList, value) {
        var result = [];

        if (fileList) {
            for (var i = 0; i < fileList.length; i++)
                result.push(fileList[i].name);
        }
        else if (value.lastIndexOf('\\') !== -1)
            result.push(value.substr(value.lastIndexOf('\\') + 1));

        return result;
    }

    // Asks the server for previously stored files by path(s).
    static loadFilesInfoFromServer (filePaths) {
        return transport.asyncServiceMsg({
            cmd:       COMMAND.getUploadedFiles,
            filePaths: typeof filePaths === 'string' ? [filePaths] : filePaths
        });
    }

    // Splits server file info into load errors and a FileListWrapper of valid files.
    static prepareFileListWrapper (filesInfo) {
        var errs           = [];
        var validFilesInfo = [];

        for (var i = 0; i < filesInfo.length; i++) {
            if (filesInfo[i].err)
                errs.push(filesInfo[i]);
            else
                validFilesInfo.push(filesInfo[i]);
        }

        return {
            errs:     errs,
            fileList: new FileListWrapper(validFilesInfo)
        };
    }

    // Uploads the files' base64 payloads to the server for later retrieval.
    static sendFilesInfoToServer (fileList, fileNames) {
        return transport.asyncServiceMsg({
            cmd:       COMMAND.uploadFiles,
            data:      UploadInfoManager._getFileListData(fileList),
            fileNames: fileNames
        });
    }

    // Resets the stored state for an input and strips its hidden info; returns
    // the removed hidden-info entry, or null if the input was never tracked.
    clearUploadInfo (input) {
        var inputInfo = this.getUploadInfo(input);

        if (inputInfo) {
            inputInfo.files = new FileListWrapper([]);
            inputInfo.value = '';

            return HiddenInfo.removeInputInfo(input);
        }

        return null;
    }

    getFiles (input) {
        var inputInfo = this.getUploadInfo(input);

        return inputInfo ? inputInfo.files : new FileListWrapper([]);
    }

    getUploadInfo (input) {
        for (var i = 0; i < this.uploadInfo.length; i++) {
            if (this.uploadInfo[i].input === input)
                return this.uploadInfo[i];
        }

        return null;
    }

    getValue (input) {
        var inputInfo = this.getUploadInfo(input);

        return inputInfo ? inputInfo.value : '';
    }

    // Reads the given FileList into base64-backed wrappers. Uses the iframe
    // shim in IE9 and FileReader everywhere else.
    loadFileListData (input, fileList) {
        /*eslint-disable no-else-return */
        if (Browser.isIE9)
            return this._loadFileListDataForIE9(input);
        else if (!fileList.length)
            return Promise.resolve(new FileListWrapper([]));
        else {
            return new Promise(resolve => {
                var index       = 0;
                var fileReader  = new FileReader();
                var file        = fileList[index];
                var readedFiles = [];

                fileReader.addEventListener('load', e => {
                    readedFiles.push({
                        data: e.target.result.substr(e.target.result.indexOf(',') + 1),
                        blob: file.slice(0, file.size),
                        info: {
                            type:             file.type,
                            name:             file.name,
                            lastModifiedDate: file.lastModifiedDate
                        }
                    });

                    // Read files sequentially with a single FileReader.
                    if (fileList[++index]) {
                        file = fileList[index];
                        fileReader.readAsDataURL(file);
                    }
                    else
                        resolve(new FileListWrapper(readedFiles));
                });

                fileReader.readAsDataURL(file);
            });
        }
        /*eslint-enable no-else-return */
    }

    // Records (or overwrites) the tracked files/value for an input and mirrors
    // the data into the input's hidden info attribute.
    setUploadInfo (input, fileList, value) {
        var inputInfo = this.getUploadInfo(input);

        if (!inputInfo) {
            inputInfo = { input: input };
            this.uploadInfo.push(inputInfo);
        }

        inputInfo.files = fileList;
        inputInfo.value = value;

        HiddenInfo.addInputInfo(input, fileList, value);
    }
}
| georgiy-abbasov/testcafe-hammerhead | src/client/sandbox/upload/info-manager.js | JavaScript | mit | 7,469 |
import pyrox.filtering as filtering
class EmptyFilter(filtering.HttpFilter):
    """HTTP filter that defines no hooks of its own.

    The body is empty, so every behavior comes unchanged from
    ``filtering.HttpFilter``; useful as a stock placeholder/no-op filter.
    """
    pass
| akatrevorjay/pyrox | pyrox/stock_filters/empty.py | Python | mit | 88 |
import subprocess
import os
# Walk the current directory tree and OCR every .jpg image with Tesseract.
# For an image "foo.jpg" in directory D, the text output is written as
# "D/foo_output" (Tesseract appends its own .txt extension).
for root, _dirs, files in os.walk(os.getcwd()):
    for fi in files:
        base, ext = os.path.splitext(fi)
        # Case-insensitive extension match (the original only caught ".jpg").
        if ext.lower() != ".jpg":
            continue
        fi_path = os.path.join(root, fi)
        # Keep the output next to its source image. The original passed the
        # bare filename to tesseract, which only worked for images located in
        # the current working directory and dumped all outputs into the cwd.
        output_file = os.path.join(root, base + "_output")
        print(output_file)
        subprocess.call(["tesseract", fi_path, output_file])
| wonkishtofu/Tesseract-OCR-Tessa | tessa/orbiturary/pictures/get_text.py | Python | mit | 380 |
// Package languages provides language rules to use with the inflect package.
package languages
import (
"github.com/ipfs/go-ipfs/Godeps/_workspace/src/github.com/chuckpreslar/inflect/types"
)
// Defines irregular words, uncountables words, and pluralization/singularization rules for the English language.
//
// Each Plural/Singular pair is (regexp pattern, replacement template); ${N}
// references capture group N of the pattern.
//
// FIXME: Singular/Plural rules could be better, I went to school for engineering, not English.
var English = types.Language("en").
	// Pluralization rules.
	Plural(`(auto)$`, `${1}s`).
	Plural(`(s|ss|sh|ch|x|to|ro|ho|jo)$`, `${1}es`).
	Plural(`(i)fe$`, `${1}ves`).
	Plural(`(t|f|g)oo(th|se|t)$`, `${1}ee${2}`).
	Plural(`(a|e|i|o|u)y$`, `${1}ys`).
	Plural(`(m|l)ouse$`, `${1}ice`).
	Plural(`(al|ie|l)f$`, `${1}ves`).
	// NOTE(review): this rule and the Singular `(d)ie` rule below look
	// inverted (plural table maps "dice"->"die", singular table maps
	// "die"->"dice"); this is vendored code, so the behavior is kept as-is -
	// confirm against the inflect package's rule-application order.
	Plural(`(d)ice`, `${1}ie`).
	Plural(`y$`, `ies`).
	Plural(`$`, `s`).
	// Singularization rules.
	Singular(`(auto)s$`, `${1}`).
	Singular(`(rse)s$`, `${1}`).
	Singular(`(s|ss|sh|ch|x|to|ro|ho|jo)es$`, `${1}`).
	Singular(`(i)ves$`, `${1}fe`).
	Singular(`(t|f|g)ee(th|se|t)$`, `${1}oo${2}`).
	Singular(`(a|e|i|o|u)ys$`, `${1}y`).
	Singular(`(m|l)ice$`, `${1}ouse`).
	Singular(`(al|ie|l)ves$`, `${1}f`).
	Singular(`(l)ies`, `${1}ie`).
	Singular(`ies$`, `y`).
	Singular(`(d)ie`, `${1}ice`).
	Singular(`s$`, ``).
	// Irregulars words.
	Irregular(`person`, `people`).
	Irregular(`child`, `children`).
	// Uncountables words.
	Uncountable(`fish`).
	Uncountable(`sheep`).
	Uncountable(`deer`).
	Uncountable(`tuna`).
	Uncountable(`salmon`).
	Uncountable(`trout`).
	Uncountable(`music`).
	Uncountable(`art`).
	Uncountable(`love`).
	Uncountable(`happiness`).
	Uncountable(`advice`).
	Uncountable(`information`).
	Uncountable(`news`).
	Uncountable(`furniture`).
	Uncountable(`luggage`).
	Uncountable(`rice`).
	Uncountable(`sugar`).
	Uncountable(`butter`).
	Uncountable(`water`).
	Uncountable(`electricity`).
	Uncountable(`gas`).
	Uncountable(`power`).
	Uncountable(`money`).
	Uncountable(`currency`).
	Uncountable(`scenery`)
| david415/go-ipfs | Godeps/_workspace/src/github.com/chuckpreslar/inflect/languages/english.go | GO | mit | 1,943 |
# frozen_string_literal: true
require 'sprockets_test'
# Instrumentation switch: when set to a Hash (by reset_stats!), every
# File.stat call is recorded; when nil, the patch is a pass-through.
$file_stat_calls = nil
class << File
  alias_method :original_stat, :stat
  # Instrumented File.stat: records the caller backtrace for each stat'ed
  # path, then delegates to the real implementation.
  def stat(filename)
    if $file_stat_calls
      $file_stat_calls[filename.to_s] ||= []
      $file_stat_calls[filename.to_s] << caller
    end
    original_stat(filename)
  end
end
# Same instrumentation pattern as File.stat above, for Dir.entries.
$dir_entires_calls = nil
class << Dir
  alias_method :original_entries, :entries
  # Instrumented Dir.entries: records the caller backtrace for each listed
  # directory, then delegates to the real implementation.
  def entries(dirname, **args)
    if $dir_entires_calls
      $dir_entires_calls[dirname.to_s] ||= []
      $dir_entires_calls[dirname.to_s] << caller
    end
    original_entries(dirname, **args)
  end
end
class TestPerformance < Sprockets::TestCase
class Cache
def initialize
@cache = {}
end
def get(key)
$cache_get_calls[key] ||= []
$cache_get_calls[key] << caller
@cache[key]
end
def set(key, value)
$cache_set_calls[key] ||= []
$cache_set_calls[key] << caller
@cache[key] = value
end
end
def setup
@env = new_environment
reset_stats!
end
def teardown
$file_stat_calls = nil
$dir_entires_calls = nil
$processor_calls = nil
$bundle_processor_calls = nil
$cache_get_calls = nil
$cache_set_calls = nil
end
test "simple file" do
@env["gallery.js"].to_s
assert_no_redundant_stat_calls
assert_no_redundant_processor_calls
assert_no_redundant_bundle_processor_calls
end
test "cached simple file" do
@env.cached["gallery.js"].to_s
assert_no_redundant_stat_calls
assert_no_redundant_processor_calls
assert_no_redundant_bundle_processor_calls
end
test "file with deps" do
@env["mobile.js"].to_s
assert_no_redundant_stat_calls
assert_no_redundant_processor_calls
assert_no_redundant_bundle_processor_calls
end
test "cached file with deps" do
@env.cached["mobile.js"].to_s
assert_no_redundant_stat_calls
assert_no_redundant_processor_calls
assert_no_redundant_bundle_processor_calls
end
test "loading from backend cache" do
env1, env2 = new_environment, new_environment
cache = Cache.new
env1.cache = cache
env2.cache = cache
env1["mobile.js"]
assert_no_redundant_processor_calls
assert_no_redundant_bundle_processor_calls
assert_no_redundant_cache_set_calls
reset_stats!
env2["mobile.js"]
assert_no_redundant_stat_calls
assert_no_processor_calls
assert_no_bundle_processor_calls
assert_no_redundant_cache_get_calls
assert_no_cache_set_calls
end
test "moving root of project after generation is still freaky fast" do
env1 = new_environment
env1.cache = Cache.new
env1["mobile.js"]
assert_no_redundant_processor_calls
assert_no_redundant_bundle_processor_calls
assert_no_redundant_cache_set_calls
Dir.mktmpdir do |dir|
Dir.chdir(dir) do
`cp -R #{File.join(fixture_path("default"), "*")} .`
env2 = new_environment("./default")
env2.cache = env1.cache
reset_stats!
env2["mobile.js"]
assert_no_redundant_stat_calls
assert_no_processor_calls
assert_no_bundle_processor_calls
assert_no_redundant_cache_get_calls
assert_no_cache_set_calls
end
end
end
test "loading from instance cache" do
env = @env.cached
env["mobile.js"]
assert_no_redundant_processor_calls
assert_no_redundant_bundle_processor_calls
reset_stats!
env["mobile.js"]
assert_no_stat_calls
assert_no_processor_calls
assert_no_bundle_processor_calls
end
test "loading from cached with backend cache" do
env1, env2 = new_environment, new_environment
cache = Cache.new
env1.cache = cache
env2.cache = cache
env1.cached["mobile.js"]
assert_no_redundant_processor_calls
assert_no_redundant_bundle_processor_calls
assert_no_redundant_cache_set_calls
reset_stats!
env2.cached["mobile.js"]
assert_no_redundant_stat_calls
assert_no_processor_calls
assert_no_bundle_processor_calls
assert_no_redundant_cache_get_calls
assert_no_cache_set_calls
end
test "rollback version" do
env = new_environment
env.cache = Cache.new
env.version = "1"
assert asset = env["mobile.js"]
id1 = asset.id
assert_no_redundant_processor_calls
assert_no_redundant_bundle_processor_calls
assert_no_redundant_cache_set_calls
reset_stats!
env.version = "2"
assert asset = env["mobile.js"]
id2 = asset.id
assert_no_redundant_processor_calls
assert_no_redundant_bundle_processor_calls
assert_no_redundant_cache_set_calls
reset_stats!
env.version = "1"
assert asset = env["mobile.js"]
assert_equal id1, asset.id
assert_no_redundant_stat_calls
assert_no_processor_calls
assert_no_bundle_processor_calls
assert_no_redundant_cache_get_calls
assert_no_cache_set_calls
reset_stats!
env.version = "2"
assert asset = env["mobile.js"]
assert_equal id2, asset.id
assert_no_redundant_stat_calls
assert_no_processor_calls
assert_no_bundle_processor_calls
assert_no_redundant_cache_get_calls
assert_no_cache_set_calls
end
test "rollback path change" do
env = new_environment
env.cache = Cache.new
env.clear_paths
env.append_path(fixture_path('default'))
assert asset = env["mobile.js"]
path1 = asset.id
assert_no_redundant_processor_calls
assert_no_redundant_bundle_processor_calls
assert_no_redundant_cache_set_calls
reset_stats!
env.clear_paths
env.append_path(fixture_path('asset'))
env.append_path(fixture_path('default'))
assert asset = env["mobile.js"]
path2 = asset.id
assert_no_redundant_processor_calls
assert_no_redundant_bundle_processor_calls
assert_no_redundant_cache_set_calls
reset_stats!
env.clear_paths
env.append_path(fixture_path('default'))
assert asset = env["mobile.js"]
assert_equal path1, asset.id
assert_no_redundant_stat_calls
assert_no_processor_calls
assert_no_bundle_processor_calls
assert_no_redundant_cache_get_calls
assert_no_cache_set_calls
reset_stats!
env.clear_paths
env.append_path(fixture_path('asset'))
env.append_path(fixture_path('default'))
assert asset = env["mobile.js"]
assert_equal path2, asset.id
assert_no_redundant_stat_calls
assert_no_processor_calls
assert_no_bundle_processor_calls
assert_no_redundant_cache_get_calls
assert_no_cache_set_calls
end
test "rollback file change" do
env = new_environment
env.cache = Cache.new
filename = fixture_path("default/tmp.js")
sandbox filename do
write(filename, "a;", 1421000000)
reset_stats!
assert asset = env["tmp.js"]
assert_equal "a;\n", asset.source
ida = asset.id
assert_no_redundant_processor_calls
assert_no_redundant_bundle_processor_calls
assert_no_redundant_cache_set_calls
write(filename, "b;", 1421000001)
reset_stats!
assert asset = env["tmp.js"]
assert_equal "b;\n", asset.source
idb = asset.id
assert_no_redundant_processor_calls
assert_no_redundant_bundle_processor_calls
assert_no_redundant_cache_set_calls
write(filename, "a;", 1421000000)
reset_stats!
assert asset = env["tmp.js"]
assert_equal "a;\n", asset.source
assert_equal ida, asset.id
assert_no_redundant_stat_calls
assert_no_processor_calls
assert_no_bundle_processor_calls
assert_no_redundant_cache_get_calls
assert_no_cache_set_calls
write(filename, "b;", 1421000001)
reset_stats!
assert asset = env["tmp.js"]
assert_equal "b;\n", asset.source
assert_equal idb, asset.id
assert_no_redundant_stat_calls
assert_no_processor_calls
assert_no_bundle_processor_calls
assert_no_redundant_cache_get_calls
assert_no_cache_set_calls
end
end
test "rollback file dependency change" do
env = new_environment
env.cache = Cache.new
main = fixture_path("default/tmp-main.js")
dep = fixture_path("default/tmp-dep.js")
sandbox main, dep do
write(main, "//= require ./tmp-dep", 1421000000)
write(dep, "a;", 1421000000)
reset_stats!
assert asset = env["tmp-main.js"]
assert_equal "a;\n", asset.source
ida = asset.id
assert_no_redundant_processor_calls
assert_no_redundant_bundle_processor_calls
assert_no_redundant_cache_set_calls
write(dep, "b;", 1421000001)
reset_stats!
assert asset = env["tmp-main.js"]
assert_equal "b;\n", asset.source
idb = asset.id
assert_no_redundant_processor_calls
assert_no_redundant_bundle_processor_calls
assert_no_redundant_cache_set_calls
write(dep, "a;", 1421000000)
reset_stats!
assert asset = env["tmp-main.js"]
assert_equal "a;\n", asset.source
assert_equal ida, asset.id
assert_no_redundant_stat_calls
assert_no_processor_calls
assert_no_bundle_processor_calls
assert_no_redundant_cache_get_calls
assert_no_cache_set_calls
write(dep, "b;", 1421000001)
reset_stats!
assert asset = env["tmp-main.js"]
assert_equal "b;\n", asset.source
assert_equal idb, asset.id
assert_no_redundant_stat_calls
assert_no_processor_calls
assert_no_bundle_processor_calls
assert_no_redundant_cache_get_calls
assert_no_cache_set_calls
end
end
test "rollback file dependency add/remove" do
env = new_environment
env.cache = Cache.new
main = fixture_path("default/tmp.js")
deps = fixture_path("default/tmp")
depa = fixture_path("default/tmp/a.js")
depb = fixture_path("default/tmp/b.js")
sandbox main, deps, depa, depb do
FileUtils.mkdir_p(deps)
write(main, "//= require_directory ./tmp", 1421000000)
write(depa, "a;", 1421000000)
File.utime(1421000000, 1421000000, deps)
reset_stats!
assert asset = env["tmp.js"]
assert_equal "a;\n", asset.source
ida = asset.id
assert_no_redundant_processor_calls
assert_no_redundant_bundle_processor_calls
assert_no_redundant_cache_set_calls
write(depb, "b;", 142100001)
File.utime(1421000001, 1421000001, deps)
reset_stats!
assert asset = env["tmp.js"]
assert_equal "a;\nb;\n", asset.source
idab = asset.id
assert_no_redundant_processor_calls
assert_no_redundant_bundle_processor_calls
assert_no_redundant_cache_set_calls
FileUtils.rm(depb)
File.utime(1421000000, 1421000000, deps)
reset_stats!
assert asset = env["tmp.js"]
assert_equal "a;\n", asset.source
assert_equal ida, asset.id
assert_no_redundant_stat_calls
assert_no_processor_calls
assert_no_bundle_processor_calls
assert_no_redundant_cache_get_calls
assert_no_redundant_cache_set_calls
write(depb, "b;", 142100001)
File.utime(1421000001, 1421000001, deps)
reset_stats!
assert asset = env["tmp.js"]
assert_equal "a;\nb;\n", asset.source
assert_equal idab, asset.id
assert_no_redundant_stat_calls
assert_no_processor_calls
assert_no_bundle_processor_calls
assert_no_redundant_cache_get_calls
assert_no_redundant_cache_set_calls
end
end
def new_environment(path = fixture_path('default'))
Sprockets::Environment.new(".") do |env|
env.cache = Cache.new
env.append_path(path)
env.register_preprocessor 'application/javascript', proc { |input|
$processor_calls[input[:filename]] ||= []
$processor_calls[input[:filename]] << caller
nil
}
env.register_bundle_processor 'application/javascript', proc { |input|
$bundle_processor_calls[input[:filename]] ||= []
$bundle_processor_calls[input[:filename]] << caller
nil
}
end
end
def reset_stats!
$file_stat_calls = {}
$dir_entires_calls = {}
$processor_calls = {}
$bundle_processor_calls = {}
$cache_get_calls = {}
$cache_set_calls = {}
end
def assert_no_stat_calls
$file_stat_calls.each do |path, callers|
assert_equal 0, callers.size, "File.stat(#{path.inspect}) called #{callers.size} times\n\n#{format_callers(callers)}"
end
$dir_entires_calls.each do |path, callers|
assert_equal 0, callers.size, "Dir.entries(#{path.inspect}) called #{callers.size} times\n\n#{format_callers(callers)}"
end
end
def assert_no_redundant_stat_calls
$file_stat_calls.each do |path, callers|
assert_equal 1, callers.size, "File.stat(#{path.inspect}) called #{callers.size} times\n\n#{format_callers(callers)}"
end
$dir_entires_calls.each do |path, callers|
assert_equal 1, callers.size, "Dir.entries(#{path.inspect}) called #{callers.size} times\n\n#{format_callers(callers)}"
end
end
def assert_no_processor_calls
$processor_calls.each do |path, callers|
assert_equal 0, callers.size, "Processor ran on #{path.inspect} #{callers.size} times\n\n#{format_callers(callers)}"
end
end
def assert_no_redundant_processor_calls
$processor_calls.each do |path, callers|
assert_equal 1, callers.size, "Processor ran on #{path.inspect} #{callers.size} times\n\n#{format_callers(callers)}"
end
end
def assert_no_bundle_processor_calls
$bundle_processor_calls.each do |path, callers|
assert_equal 0, callers.size, "Bundle Processor ran on #{path.inspect} #{callers.size} times\n\n#{format_callers(callers)}"
end
end
def assert_no_redundant_bundle_processor_calls
$bundle_processor_calls.each do |path, callers|
assert_equal 1, callers.size, "Bundle Processor ran on #{path.inspect} #{callers.size} times\n\n#{format_callers(callers)}"
end
end
def assert_no_redundant_cache_get_calls
$cache_get_calls.each do |key, callers|
assert_equal 1, callers.size, "cache get #{key.inspect} #{callers.size} times\n\n#{format_callers(callers)}"
end
end
def assert_no_cache_set_calls
$cache_set_calls.each do |key, callers|
assert_equal 0, callers.size, "cache set #{key.inspect} #{callers.size} times\n\n#{format_callers(callers)}"
end
end
def assert_no_redundant_cache_set_calls
$cache_set_calls.each do |key, callers|
assert_equal 1, callers.size, "cache set #{key.inspect} #{callers.size} times\n\n#{format_callers(callers)}"
end
end
def format_callers(callers)
  # Render each recorded backtrace as newline-joined frames, with blank
  # lines separating one call site from the next.
  formatted = callers.map do |call_site|
    call_site.join("\n")
  end
  formatted.join("\n\n\n")
end
end
| eileencodes/sprockets | test/test_performance.rb | Ruby | mit | 14,649 |
import { newE2EPage } from '@stencil/core/testing';
import { listenForEvent, waitForFunctionTestContext } from '../../../test/utils';
// E2E: with the Web Animations implementation, every lifecycle hook
// (beforeRead, beforeWrite, ionAnimationFinished, afterRead, afterWrite)
// must fire once, and the before*/after* class and style hooks must be
// applied during playback and restored afterwards.
test(`animation:web: hooks`, async () => {
  const page = await newE2EPage({ url: '/src/utils/animation/test/hooks' });

  const screenshotCompares = [];
  screenshotCompares.push(await page.compareScreenshot());

  const square = await page.$('.square-a');

  // Initial state set up by the test page, before any animation runs.
  const styles = await getStyles(page, '.square-a');
  expect(styles.paddingBottom).toEqual('20px');
  expect(styles.color).toEqual('rgb(0, 0, 0)');

  const classList = await getClassList(square);
  expect(classList.includes('hello-world')).toEqual(true);
  expect(classList.includes('test-class')).toEqual(false);

  // The nesting registers all five hook listeners before the innermost
  // callback clicks .play, so no hook firing can be missed.
  await waitForEventToBeCalled('afterWrite', page, square, async () => {
    await waitForEventToBeCalled('afterRead', page, square, async () => {
      await waitForEventToBeCalled('ionAnimationFinished', page, square, async () => {
        await waitForEventToBeCalled('beforeWrite', page, square, async () => {
          await waitForEventToBeCalled('beforeRead', page, square, async () => {
            await page.click('.play');
            await page.waitForSelector('.play');

            // Test beforeRemoveClass and beforeAddClass
            const webClassListAgain = await getClassList(square);
            expect(webClassListAgain.includes('hello-world')).toEqual(false);
            expect(webClassListAgain.includes('test-class')).toEqual(true);

            // Test beforeStyles and beforeClearStyles
            const webStylesAgain = await getStyles(page, '.square-a');
            expect(webStylesAgain.paddingBottom).toEqual('0px');
            expect(webStylesAgain.color).toEqual('rgb(128, 0, 128)');
          });
        });
      });
    });
  });

  // Test afterRemoveClass and afterAddClass
  const classListAgain = await getClassList(square);
  expect(classListAgain.includes('hello-world')).toEqual(true);
  expect(classListAgain.includes('test-class')).toEqual(false);

  // Test afterStyles and afterClearStyles
  const stylesAgain = await getStyles(page, '.square-a');
  expect(stylesAgain.paddingBottom).toEqual('20px');
  expect(stylesAgain.color).toEqual('rgb(0, 0, 0)');

  screenshotCompares.push(await page.compareScreenshot('end animation'));
});
// E2E: same hook assertions as the web-animations test above, but with
// `ionic:_forceCSSAnimations=true` so the CSS-animation fallback path is
// exercised instead.
test(`animation:css: hooks`, async () => {
  const page = await newE2EPage({ url: '/src/utils/animation/test/hooks?ionic:_forceCSSAnimations=true' });

  const screenshotCompares = [];
  screenshotCompares.push(await page.compareScreenshot());

  const square = await page.$('.square-a');

  // Initial state set up by the test page, before any animation runs.
  const styles = await getStyles(page, '.square-a');
  expect(styles.paddingBottom).toEqual('20px');
  expect(styles.color).toEqual('rgb(0, 0, 0)');

  const classList = await getClassList(square);
  expect(classList.includes('hello-world')).toEqual(true);
  expect(classList.includes('test-class')).toEqual(false);

  // Register all five hook listeners before the innermost callback clicks
  // .play, so no hook firing can be missed.
  await waitForEventToBeCalled('afterWrite', page, square, async () => {
    await waitForEventToBeCalled('afterRead', page, square, async () => {
      await waitForEventToBeCalled('ionAnimationFinished', page, square, async () => {
        await waitForEventToBeCalled('beforeWrite', page, square, async () => {
          await waitForEventToBeCalled('beforeRead', page, square, async () => {
            await page.click('.play');
            await page.waitForSelector('.play');

            // Test beforeRemoveClass and beforeAddClass
            const cssClassListAgain = await getClassList(square);
            expect(cssClassListAgain.includes('hello-world')).toEqual(false);
            expect(cssClassListAgain.includes('test-class')).toEqual(true);

            // Test beforeStyles and beforeClearStyles
            const cssStylesAgain = await getStyles(page, '.square-a');
            expect(cssStylesAgain.paddingBottom).toEqual('0px');
            expect(cssStylesAgain.color).toEqual('rgb(128, 0, 128)');
          });
        });
      });
    });
  });

  // Test afterRemoveClass and afterAddClass
  const classListAgain = await getClassList(square);
  expect(classListAgain.includes('hello-world')).toEqual(true);
  expect(classListAgain.includes('test-class')).toEqual(false);

  // Test afterStyles and afterClearStyles
  const stylesAgain = await getStyles(page, '.square-a');
  expect(stylesAgain.paddingBottom).toEqual('20px');
  expect(stylesAgain.color).toEqual('rgb(0, 0, 0)');

  screenshotCompares.push(await page.compareScreenshot('end animation'));
});
/**
 * Registers a listener for `eventName` on `el`, runs the optional `fn`
 * callback, then resolves once the event has fired `num` times.
 *
 * Rewritten as a plain async function: the previous implementation passed an
 * async executor to `new Promise(...)`, an antipattern in which a rejection
 * thrown by `page.exposeFunction` or `listenForEvent` is swallowed — the
 * returned promise never settles and the test hangs instead of failing.
 */
const waitForEventToBeCalled = async (eventName: string, page: any, el: HTMLElement, fn: any, num = 1) => {
  const EVENT_FIRED = `on${eventName}`;
  // Counter bumped from the browser context each time the event fires.
  const eventFiredCount: any = { count: 0 };
  await page.exposeFunction(EVENT_FIRED, () => {
    eventFiredCount.count += 1;
  });

  await listenForEvent(page, eventName, el, EVENT_FIRED);

  if (fn) {
    await fn();
  }

  // Poll until the event has been observed the requested number of times.
  await waitForFunctionTestContext((payload: any) => {
    return payload.eventFiredCount.count === payload.num;
  }, { eventFiredCount, num });
};
// Returns the element's computed styles as a plain JSON-serializable object.
const getStyles = async (page: any, selector: string) => {
  const styles = await page.evaluate((payload: any) => {
    const target = document.querySelector(payload.selector);
    const computed = getComputedStyle(target);
    return JSON.parse(JSON.stringify(computed));
  }, { selector });
  return styles;
};
// Returns the element's classList as an array of class-name strings.
const getClassList = async (el: HTMLElement) => {
  const handle = await (el as any).getProperty('classList');
  const serialized = await handle.jsonValue();
  return Object.values(serialized);
};
| driftyco/ionic | core/src/utils/animation/test/hooks/e2e.ts | TypeScript | mit | 5,554 |
/* Get Programming with JavaScript
* Listing 12.04
* Guess the random number
*/
// Builds a guessing game: the returned function compares the caller's
// number against a secret (1..10) captured in the closure.
var getGuesser = function () {
  var secret = Math.floor(Math.random() * 10 + 1);
  var respond = function (userNumber) {
    return userNumber === secret ? "Well done!" : "Unlucky, try again.";
  };
  return respond;
};

var guess = getGuesser();
/* Further Adventures
*
* 1) Run the program.
*
* 2) Play the game a few times on the console.
* e.g. guess(2)
*
* 3) Change the code so the secret number is
* between 30 and 50.
*
* 4) Test your changes.
*
* CHALLENGE: Create a function called 'between'
* that returns a random whole number between two
* numbers passed as arguments.
*
* e.g. between(1, 5) // 1 <= whole number <= 5
* between(100, 200) // 100 <= whole number <= 200
*
*/ | jrlarsen/GetProgramming | Ch12_Conditions/listing12.04.js | JavaScript | mit | 829 |
import math

# Tuning constants for a motor-control (ESC) simulation — presumably a
# field-oriented control loop; confirm semantics against the simulator code.
FREQ = 3000            # NOTE(review): looks like a loop/PWM frequency — confirm units
V = 13.3               # supply voltage, presumably volts — confirm
REALRPM = 305.6
LIMIT = 2
# RC input pulse-width bounds (microseconds, by convention — confirm).
PWM_IN_MIN = 1100
PWM_IN_MAX = 2000
RPM_MAX = 2000.0 # rpm
RPM_MIN = 300.0 # rpm
# GT: Changed constants?
RPM_SLEW = 10000.0 # rpm/s
DT_LOOP = 0.001 # seconds per slow loop
# PI gains and feed-forward terms for the RPM loop (units per comments).
KP_RPM_UP = 0.3 # mA/rpm
KI_RPM = 0.002 # mA/rpm/s
I_SAT_RPM = 20.0 # mA
KFF_I = 4.000e-5 # mA/rpm^2
# KFF_V = 0.00038 # (0-255)/rpm
KFF_V = 0.0 # (0-255)/rpm
AMAX = 20.0 # max accelerating current [A]
BMAX = 5.0 # max braking current [A]
# RPM controller
KP_EST_RPM = 2
KI_EST_RPM = 0.02
KPQ = 1.000 / 500.0 # [LSB/mA/loop] ~= [1V/A/s at 24VDC]
KPD = 0.3 / 500.0 # [LSB/mA/loop] ~= ??? calculate
CHAR_90_DEG = 64
LOOP_INTERVAL = 4
# Previous tuning values kept commented out for reference.
#iafactor = 0.03
#ibicfactor = 0.02
iafactor = 0.03
ibicfactor = 0.02
# Trig shortcuts. NOTE(review): SQRT3DIV2 is a hard-coded approximation of
# sqrt(3)/2 while the others are computed via math.sqrt.
SQRT3DIV2 = 0.866025404
ONEDIVSQRT3 = 1.0/math.sqrt(3.0)
TWODIVSQRT3 = 2.0/math.sqrt(3.0)
BEMFK = 0.00537        # back-EMF constant — units not stated; confirm
| gtoonstra/foc_esc | escsim/constants.py | Python | mit | 918 |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Functional operations.
## Higher Order Operators
TensorFlow provides several higher order operators to simplify the common
map-reduce programming patterns.
@@map_fn
@@foldl
@@foldr
@@scan
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import tensor_array_ops
from tensorflow.python.ops import variable_scope as vs
# go/tf-wildcard-import
# pylint: disable=wildcard-import
from tensorflow.python.ops.gen_functional_ops import *
# pylint: enable=wildcard-import
# pylint: disable=unused-import
from tensorflow.python.ops.gen_functional_ops import _symbolic_gradient
# pylint: enable=unused-import
from tensorflow.python.util import nest
# TODO(yuanbyu, mrry): Handle stride to support sliding windows.
def foldl(fn, elems, initializer=None, parallel_iterations=10, back_prop=True,
          swap_memory=False, name=None):
  """foldl on the list of tensors unpacked from `elems` on dimension 0.

  This foldl operator repeatedly applies the callable `fn` to a sequence
  of elements from first to last. The elements are made of the tensors
  unpacked from `elems` on dimension 0. The callable fn takes two tensors as
  arguments. The first argument is the accumulated value computed from the
  preceding invocation of fn. If `initializer` is None, `elems` must contain
  at least one element, and its first element is used as the initializer.

  Suppose that `elems` is unpacked into `values`, a list of tensors. The shape
  of the result tensor is `fn(initializer, values[0]).shape`.

  Args:
    fn: The callable to be performed.
    elems: A tensor to be unpacked on dimension 0.
    initializer: (optional) The initial value for the accumulator.
    parallel_iterations: (optional) The number of iterations allowed to run
      in parallel.
    back_prop: (optional) True enables support for back propagation.
    swap_memory: (optional) True enables GPU-CPU memory swapping.
    name: (optional) Name prefix for the returned tensors.

  Returns:
    A tensor resulting from applying `fn` consecutively to the list of tensors
    unpacked from `elems`, from first to last.

  Raises:
    TypeError: if `fn` is not callable.

  Example:
    ```python
    elems = [1, 2, 3, 4, 5, 6]
    sum = foldl(lambda a, x: a + x, elems)
    # sum == 21
    ```
  """
  if not callable(fn):
    raise TypeError("fn must be callable.")

  with ops.op_scope([elems], name, "foldl"):
    # Any get_variable calls in fn will cache the first call locally
    # and not issue repeated network I/O requests for each iteration.
    varscope = vs.get_variable_scope()
    varscope_caching_device_was_none = False
    if varscope.caching_device is None:
      # TODO(ebrevdo): Change to using colocate_with here and in other methods.
      varscope.set_caching_device(lambda op: op.device)
      varscope_caching_device_was_none = True

    # Convert elems to tensor array.
    elems = ops.convert_to_tensor(elems, name="elems")
    n = array_ops.shape(elems)[0]
    elems_ta = tensor_array_ops.TensorArray(dtype=elems.dtype, size=n,
                                            dynamic_size=False,
                                            infer_shape=True)
    elems_ta = elems_ta.unpack(elems)

    # With no initializer, seed the accumulator with element 0 and start the
    # loop at index 1; otherwise start at 0 with the caller's initializer.
    if initializer is None:
      a = elems_ta.read(0)
      i = constant_op.constant(1)
    else:
      a = ops.convert_to_tensor(initializer)
      i = constant_op.constant(0)

    def compute(i, a):
      # Fold element i into the running accumulator.
      a = fn(a, elems_ta.read(i))
      return [i + 1, a]

    _, r_a = control_flow_ops.while_loop(
        lambda i, a: i < n, compute, [i, a],
        parallel_iterations=parallel_iterations,
        back_prop=back_prop,
        swap_memory=swap_memory)

    # Restore the caching device if it was changed above.
    if varscope_caching_device_was_none:
      varscope.set_caching_device(None)

    return r_a
def foldr(fn, elems, initializer=None, parallel_iterations=10, back_prop=True,
          swap_memory=False, name=None):
  """foldr on the list of tensors unpacked from `elems` on dimension 0.

  This foldr operator repeatedly applies the callable `fn` to a sequence
  of elements from last to first. The elements are made of the tensors
  unpacked from `elems`. The callable fn takes two tensors as arguments.
  The first argument is the accumulated value computed from the preceding
  invocation of fn. If `initializer` is None, `elems` must contain at least
  one element, and its last element is used as the initializer.

  Suppose that `elems` is unpacked into `values`, a list of tensors. The shape
  of the result tensor is `fn(initializer, values[0]).shape`.

  Args:
    fn: The callable to be performed.
    elems: A tensor that is unpacked into a sequence of tensors to apply `fn`.
    initializer: (optional) The initial value for the accumulator.
    parallel_iterations: (optional) The number of iterations allowed to run
      in parallel.
    back_prop: (optional) True enables support for back propagation.
    swap_memory: (optional) True enables GPU-CPU memory swapping.
    name: (optional) Name prefix for the returned tensors.

  Returns:
    A tensor resulting from applying `fn` consecutively to the list of tensors
    unpacked from `elems`, from last to first.

  Raises:
    TypeError: if `fn` is not callable.

  Example:
    ```python
    elems = [1, 2, 3, 4, 5, 6]
    sum = foldr(lambda a, x: a + x, elems)
    # sum == 21
    ```
  """
  if not callable(fn):
    raise TypeError("fn must be callable.")

  with ops.op_scope([elems], name, "foldr"):
    # Any get_variable calls in fn will cache the first call locally
    # and not issue repeated network I/O requests for each iteration.
    varscope = vs.get_variable_scope()
    varscope_caching_device_was_none = False
    if varscope.caching_device is None:
      # TODO(ebrevdo): Change to using colocate_with here and in other methods.
      varscope.set_caching_device(lambda op: op.device)
      varscope_caching_device_was_none = True

    # Convert elems to tensor array.
    elems = ops.convert_to_tensor(elems, name="elems")
    n = array_ops.shape(elems)[0]
    elems_ta = tensor_array_ops.TensorArray(dtype=elems.dtype, size=n,
                                            dynamic_size=False,
                                            infer_shape=True)
    elems_ta = elems_ta.unpack(elems)

    # With no initializer, seed the accumulator with the LAST element (index
    # n - 1); the loop then walks indices downward toward 0.
    if initializer is None:
      i = n - 1
      a = elems_ta.read(i)
    else:
      i = n
      a = ops.convert_to_tensor(initializer)

    def compute(i, a):
      # Fold element i - 1 into the running accumulator, moving backwards.
      i -= 1
      a = fn(a, elems_ta.read(i))
      return [i, a]

    _, r_a = control_flow_ops.while_loop(
        lambda i, a: i > 0, compute, [i, a],
        parallel_iterations=parallel_iterations,
        back_prop=back_prop,
        swap_memory=swap_memory)

    # Restore the caching device if it was changed above.
    if varscope_caching_device_was_none:
      varscope.set_caching_device(None)

    return r_a
def map_fn(fn, elems, dtype=None, parallel_iterations=10, back_prop=True,
           swap_memory=False, infer_shape=True, name=None):
  """map on the list of tensors unpacked from `elems` on dimension 0.

  The simplest version of `map` repeatedly applies the callable `fn` to a
  sequence of elements from first to last. The elements are made of the
  tensors unpacked from `elems`. `dtype` is the data type of the return
  value of `fn`. Users must provide `dtype` if it is different from
  the data type of `elems`.

  Suppose that `elems` is unpacked into `values`, a list of tensors. The shape
  of the result tensor is `[values.shape[0]] + fn(values[0]).shape`.

  This method also allows multi-arity `elems` and output of `fn`. If `elems`
  is a (possibly nested) list or tuple of tensors, then each of these tensors
  must have a matching first (unpack) dimension. The signature of `fn` may
  match the structure of `elems`. That is, if `elems` is
  `(t1, [t2, t3, [t4, t5]])`, then an appropriate signature for `fn` is:
  `fn = lambda (t1, [t2, t3, [t4, t5]]):`.

  Furthermore, `fn` may emit a different structure than its input. For example,
  `fn` may look like: `fn = lambda t1: return (t1 + 1, t1 - 1)`. In this case,
  the `dtype` parameter is not optional: `dtype` must be a type or (possibly
  nested) tuple of types matching the output of `fn`.

  Args:
    fn: The callable to be performed. It accepts one argument, which will
      have the same (possibly nested) structure as `elems`. Its output
      must have the same structure as `dtype` if one is provided, otherwise
      it must have the same structure as `elems`.
    elems: A tensor or (possibly nested) sequence of tensors, each of which
      will be unpacked along their first dimension. The nested sequence
      of the resulting slices will be applied to `fn`.
    dtype: (optional) The output type(s) of `fn`. If `fn` returns a structure
      of Tensors differing from the structure of `elems`, then `dtype` is not
      optional and must have the same structure as the output of `fn`.
    parallel_iterations: (optional) The number of iterations allowed to run
      in parallel.
    back_prop: (optional) True enables support for back propagation.
    swap_memory: (optional) True enables GPU-CPU memory swapping.
    infer_shape: (optional) False disables tests for consistent output shapes.
    name: (optional) Name prefix for the returned tensors.

  Returns:
    A tensor or (possibly nested) sequence of tensors. Each tensor packs the
    results of applying `fn` to tensors unpacked from `elems` along the first
    dimension, from first to last.

  Raises:
    TypeError: if `fn` is not callable or the structure of the output of
      `fn` and `dtype` do not match.
    ValueError: if the lengths of the output of `fn` and `dtype` do not match.

  Examples:
    ```python
    elems = np.array([1, 2, 3, 4, 5, 6])
    squares = map_fn(lambda x: x * x, elems)
    # squares == [1, 4, 9, 16, 25, 36]
    ```

    ```python
    elems = (np.array([1, 2, 3]), np.array([-1, 1, -1]))
    alternate = map_fn(lambda x: x[0] * x[1], elems, dtype=tf.int64)
    # alternate == [-1, 2, -3]
    ```

    ```python
    elems = np.array([1, 2, 3])
    alternates = map_fn(lambda x: (x, -x), elems, dtype=(tf.int64, tf.int64))
    # alternates[0] == [1, 2, 3]
    # alternates[1] == [-1, -2, -3]
    ```
  """
  if not callable(fn):
    raise TypeError("fn must be callable.")

  # Helpers that flatten/pack the (possibly nested) input structure so the
  # implementation below can work with flat lists of tensors.
  input_is_sequence = nest.is_sequence(elems)
  input_flatten = lambda x: nest.flatten(x) if input_is_sequence else [x]
  def input_pack(x):
    return nest.pack_sequence_as(elems, x) if input_is_sequence else x[0]

  if dtype is None:
    # Without an explicit dtype, the output structure mirrors the input.
    output_is_sequence = input_is_sequence
    output_flatten = input_flatten
    output_pack = input_pack
  else:
    # With an explicit dtype, the output structure is dictated by `dtype`.
    output_is_sequence = nest.is_sequence(dtype)
    output_flatten = lambda x: nest.flatten(x) if output_is_sequence else [x]
    def output_pack(x):
      return (nest.pack_sequence_as(dtype, x)
              if output_is_sequence else x[0])

  elems_flat = input_flatten(elems)

  with ops.op_scope(elems_flat, name, "map"):
    # Any get_variable calls in fn will cache the first call locally
    # and not issue repeated network I/O requests for each iteration.
    varscope = vs.get_variable_scope()
    varscope_caching_device_was_none = False
    if varscope.caching_device is None:
      # TODO(ebrevdo): Change to using colocate_with here and in other methods.
      varscope.set_caching_device(lambda op: op.device)
      varscope_caching_device_was_none = True

    elems_flat = [
        ops.convert_to_tensor(elem, name="elem") for elem in elems_flat]

    dtype = dtype or input_pack([elem.dtype for elem in elems_flat])
    dtype_flat = output_flatten(dtype)

    # Convert elems to tensor array.
    n = array_ops.shape(elems_flat[0])[0]

    # TensorArrays are always flat
    elems_ta = [
        tensor_array_ops.TensorArray(dtype=elem.dtype, size=n,
                                     dynamic_size=False,
                                     infer_shape=True)
        for elem in elems_flat]
    # Unpack elements
    elems_ta = [
        elem_ta.unpack(elem) for elem_ta, elem in zip(elems_ta, elems_flat)]

    i = constant_op.constant(0)

    # One accumulator TensorArray per flat output tensor.
    accs_ta = [
        tensor_array_ops.TensorArray(dtype=dt, size=n,
                                     dynamic_size=False,
                                     infer_shape=infer_shape)
        for dt in dtype_flat]

    def compute(i, tas):
      """The loop body of map_fn.

      Args:
        i: the loop counter
        tas: the flat TensorArray accumulator list

      Returns:
        (i + 1, tas): the updated counter + updated TensorArrays

      Raises:
        TypeError: if dtype and packed_fn_values structure do not match
        ValueType: if dtype and packed_fn_values lengths do not match
      """
      packed_values = input_pack([elem_ta.read(i) for elem_ta in elems_ta])
      packed_fn_values = fn(packed_values)
      nest.assert_same_structure(dtype or elems, packed_fn_values)
      flat_fn_values = output_flatten(packed_fn_values)
      tas = [ta.write(i, value) for (ta, value) in zip(tas, flat_fn_values)]
      return (i + 1, tas)

    _, r_a = control_flow_ops.while_loop(
        lambda i, _: i < n, compute, (i, accs_ta),
        parallel_iterations=parallel_iterations,
        back_prop=back_prop,
        swap_memory=swap_memory)
    results_flat = [r.pack() for r in r_a]

    # Propagate the statically-known leading dimension (shared by all inputs)
    # onto each packed result.
    n_static = elems_flat[0].get_shape().with_rank_at_least(1)[0]
    for elem in elems_flat[1:]:
      n_static.merge_with(elem.get_shape().with_rank_at_least(1)[0])
    for r in results_flat:
      r.set_shape(tensor_shape.TensorShape(n_static).concatenate(
          r.get_shape()[1:]))

    # Restore the caching device if it was changed above.
    if varscope_caching_device_was_none:
      varscope.set_caching_device(None)

    return output_pack(results_flat)
def scan(fn, elems, initializer=None, parallel_iterations=10, back_prop=True,
         swap_memory=False, infer_shape=True, name=None):
  """scan on the list of tensors unpacked from `elems` on dimension 0.

  The simplest version of `scan` repeatedly applies the callable `fn` to a
  sequence of elements from first to last. The elements are made of the tensors
  unpacked from `elems` on dimension 0. The callable fn takes two tensors as
  arguments. The first argument is the accumulated value computed from the
  preceding invocation of fn. If `initializer` is None, `elems` must contain
  at least one element, and its first element is used as the initializer.

  Suppose that `elems` is unpacked into `values`, a list of tensors. The shape
  of the result tensor is `[len(values)] + fn(initializer, values[0]).shape`.

  This method also allows multi-arity `elems` and accumulator.  If `elems`
  is a (possibly nested) list or tuple of tensors, then each of these tensors
  must have a matching first (unpack) dimension.  The second argument of
  `fn` must match the structure of `elems`.

  If no `initializer` is provided, the output structure and dtypes of `fn`
  are assumed to be the same as its input; and in this case, the first
  argument of `fn` must match the structure of `elems`.

  If an `initializer` is provided, then the output of `fn` must have the same
  structure as `initializer`; and the first argument of `fn` must match
  this structure.

  For example, if `elems` is `(t1, [t2, t3])` and `initializer` is
  `[i1, i2]` then an appropriate signature for `fn` in `python2` is:
  `fn = lambda (acc_p1, acc_p2), (t1 [t2, t3]):` and `fn` must return a list,
  `[acc_n1, acc_n2]`.  An alternative correct signature for `fn`, and the
  one that works in `python3`, is:
  `fn = lambda a, t:`, where `a` and `t` correspond to the input tuples.

  Args:
    fn: The callable to be performed.  It accepts two arguments.  The first
      will have the same (possibly nested) structure as `elems`.  The second
      will have the same structure as `initializer` if one is provided,
      otherwise it will have the same structure as `elems`.  Its output
      must have the same structure as `initializer` if one is provided,
      otherwise it must have the same structure as `elems`.
    elems: A tensor or (possibly nested) sequence of tensors, each of which
      will be unpacked along their first dimension.  The nested sequence
      of the resulting slices will be the first argument to `fn`.
    initializer: (optional) A tensor or (possibly nested) sequence of tensors,
      initial value for the accumulator, and the expected output type of `fn`.
    parallel_iterations: (optional) The number of iterations allowed to run
      in parallel.
    back_prop: (optional) True enables support for back propagation.
    swap_memory: (optional) True enables GPU-CPU memory swapping.
    infer_shape: (optional) False disables tests for consistent output shapes.
    name: (optional) Name prefix for the returned tensors.

  Returns:
    A tensor or (possibly nested) sequence of tensors.  Each tensor packs the
    results of applying `fn` to tensors unpacked from `elems` along the first
    dimension, and the previous accumulator value(s), from first to last.

  Raises:
    TypeError: if `fn` is not callable or the structure of the output of
      `fn` and `initializer` do not match.
    ValueError: if the lengths of the output of `fn` and `initializer`
      do not match.

  Examples:
    ```python
    elems = np.array([1, 2, 3, 4, 5, 6])
    sum = scan(lambda a, x: a + x, elems)
    # sum == [1, 3, 6, 10, 15, 21]
    ```

    ```python
    elems = np.array([1, 2, 3, 4, 5, 6])
    initializer = np.array(0)
    sum_one = scan(
        lambda a, x: x[0] - x[1] + a, (elems + 1, elems), initializer)
    # sum_one == [1, 2, 3, 4, 5, 6]
    ```

    ```python
    elems = np.array([1, 0, 0, 0, 0, 0])
    initializer = (np.array(0), np.array(1))
    fibonaccis = scan(lambda a, _: (a[1], a[0] + a[1]), elems, initializer)
    # fibonaccis == ([1, 1, 2, 3, 5, 8], [1, 2, 3, 5, 8, 13])
    ```
  """
  if not callable(fn):
    raise TypeError("fn must be callable.")

  # Helpers that flatten/pack the (possibly nested) input structure so the
  # implementation below can work with flat lists of tensors.
  input_is_sequence = nest.is_sequence(elems)
  input_flatten = lambda x: nest.flatten(x) if input_is_sequence else [x]
  def input_pack(x):
    return nest.pack_sequence_as(elems, x) if input_is_sequence else x[0]

  if initializer is None:
    # Without an initializer, the accumulator structure mirrors the input.
    output_is_sequence = input_is_sequence
    output_flatten = input_flatten
    output_pack = input_pack
  else:
    # With an initializer, the accumulator structure is dictated by it.
    output_is_sequence = nest.is_sequence(initializer)
    output_flatten = lambda x: nest.flatten(x) if output_is_sequence else [x]
    def output_pack(x):
      return (nest.pack_sequence_as(initializer, x)
              if output_is_sequence else x[0])

  elems_flat = input_flatten(elems)

  with ops.op_scope(elems_flat, name, "scan"):
    # Any get_variable calls in fn will cache the first call locally
    # and not issue repeated network I/O requests for each iteration.
    varscope = vs.get_variable_scope()
    varscope_caching_device_was_none = False
    if varscope.caching_device is None:
      # TODO(ebrevdo): Change to using colocate_with here and in other methods.
      varscope.set_caching_device(lambda op: op.device)
      varscope_caching_device_was_none = True

    # Convert elems to tensor array.
    elems_flat = [
        ops.convert_to_tensor(elem, name="elem") for elem in elems_flat]

    n = array_ops.shape(elems_flat[0])[0]

    # TensorArrays are always flat
    elems_ta = [
        tensor_array_ops.TensorArray(dtype=elem.dtype, size=n,
                                     dynamic_size=False,
                                     infer_shape=True)
        for elem in elems_flat]
    # Unpack elements
    elems_ta = [
        elem_ta.unpack(elem) for elem_ta, elem in zip(elems_ta, elems_flat)]

    # With no initializer, seed the accumulator with element 0 and start the
    # loop at index 1; otherwise start at 0 with the caller's initializer.
    if initializer is None:
      a_flat = [elem.read(0) for elem in elems_ta]
      i = constant_op.constant(1)
    else:
      initializer_flat = output_flatten(initializer)
      a_flat = [ops.convert_to_tensor(init) for init in initializer_flat]
      i = constant_op.constant(0)

    # Create a tensor array to store the intermediate values.
    accs_ta = [
        tensor_array_ops.TensorArray(dtype=init.dtype, size=n,
                                     dynamic_size=False,
                                     infer_shape=infer_shape)
        for init in a_flat]

    # The seed element is itself the first output when no initializer was
    # given, so record it at position 0 before the loop runs.
    if initializer is None:
      accs_ta = [acc_ta.write(0, a) for (acc_ta, a) in zip(accs_ta, a_flat)]

    def compute(i, a_flat, tas):
      """The loop body of scan.

      Args:
        i: the loop counter.
        a_flat: the accumulator value(s), flattened.
        tas: the output accumulator TensorArray(s), flattened.

      Returns:
        [i + 1, a_flat, tas]: the updated counter + new accumulator values +
          updated TensorArrays

      Raises:
        TypeError: if initializer and fn() output structure do not match
        ValueType: if initializer and fn() output lengths do not match
      """
      packed_elems = input_pack([elem_ta.read(i) for elem_ta in elems_ta])
      packed_a = output_pack(a_flat)
      a_out = fn(packed_a, packed_elems)
      nest.assert_same_structure(
          elems if initializer is None else initializer, a_out)
      flat_a_out = output_flatten(a_out)
      tas = [ta.write(i, value) for (ta, value) in zip(tas, flat_a_out)]
      return (i + 1, flat_a_out, tas)

    _, _, r_a = control_flow_ops.while_loop(
        lambda i, _1, _2: i < n, compute, (i, a_flat, accs_ta),
        parallel_iterations=parallel_iterations,
        back_prop=back_prop, swap_memory=swap_memory)

    results_flat = [r.pack() for r in r_a]

    # Propagate the statically-known leading dimension (shared by all inputs)
    # onto each packed result.
    n_static = elems_flat[0].get_shape().with_rank_at_least(1)[0]
    for elem in elems_flat[1:]:
      n_static.merge_with(elem.get_shape().with_rank_at_least(1)[0])
    for r in results_flat:
      r.set_shape(tensor_shape.TensorShape(n_static).concatenate(
          r.get_shape()[1:]))

    # Restore the caching device if it was changed above.
    if varscope_caching_device_was_none:
      varscope.set_caching_device(None)

    return output_pack(results_flat)
@ops.RegisterShape("SymbolicGradient")
def _symbolic_gradient_shape(op):
  # Say, (u, v) = f(x, y, z): _symbolic_gradient(f) maps
  # (x, y, z, du, dv) -> (dx, dy, dz), so each output gradient has the same
  # shape as the corresponding leading input of f.
  num_outputs = len(op.outputs)
  return [inp.get_shape() for inp in op.inputs[:num_outputs]]
| Lab603/PicEncyclopedias | jni-build/jni/include/tensorflow/python/ops/functional_ops.py | Python | mit | 23,180 |
(function () {
    // Factory exposing the private "total" REST endpoint as a $resource.
    var totalFactory = function ($resource) {
        var totalResource = $resource("/api/priv/total/");
        return totalResource;
    };

    controlCajaApp.factory('totalFactory', ['$resource', totalFactory]);
}());
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/futex-emulation.h"
#include <limits>
#include "src/base/macros.h"
#include "src/base/platform/time.h"
#include "src/conversions.h"
#include "src/handles-inl.h"
#include "src/isolate.h"
#include "src/list-inl.h"
#include "src/objects-inl.h"
namespace v8 {
namespace internal {
// Process-wide mutex guarding the wait list and all waiter node state.
base::LazyMutex FutexEmulation::mutex_ = LAZY_MUTEX_INITIALIZER;

// Lazily constructed global list of threads blocked in FutexEmulation::Wait.
base::LazyInstance<FutexWaitList>::type FutexEmulation::wait_list_ =
    LAZY_INSTANCE_INITIALIZER;
void FutexWaitListNode::NotifyWake() {
  // Lock the FutexEmulation mutex before notifying. We know that the mutex
  // will have been unlocked if we are currently waiting on the condition
  // variable.
  //
  // The mutex may also not be locked if the other thread is currently handling
  // interrupts, or if FutexEmulation::Wait was just called and the mutex
  // hasn't been locked yet. In either of those cases, we set the interrupted
  // flag to true, which will be tested after the mutex is re-locked.
  base::LockGuard<base::Mutex> lock_guard(FutexEmulation::mutex_.Pointer());

  // Fix: notify and record the interrupt unconditionally. The previous
  // `if (waiting_)` guard contradicted the contract described above — when
  // the target thread had not yet reached the wait (waiting_ == false), the
  // interrupt was silently dropped instead of being latched in interrupted_
  // for the wait loop to observe after re-locking mutex_.
  cond_.NotifyOne();
  interrupted_ = true;
}
FutexWaitList::FutexWaitList() : head_(nullptr), tail_(nullptr) {}
// Appends |node| at the tail of the doubly-linked wait list.
void FutexWaitList::AddNode(FutexWaitListNode* node) {
  // The node must not already be linked into a list.
  DCHECK(node->prev_ == nullptr && node->next_ == nullptr);

  node->prev_ = tail_;
  node->next_ = nullptr;
  if (tail_ == nullptr) {
    // List was empty: the new node becomes the head as well.
    head_ = node;
  } else {
    tail_->next_ = node;
  }
  tail_ = node;
}
// Unlinks |node| from the doubly-linked wait list and clears its links.
void FutexWaitList::RemoveNode(FutexWaitListNode* node) {
  FutexWaitListNode* const before = node->prev_;
  FutexWaitListNode* const after = node->next_;

  // Splice the node out, updating head_/tail_ when it sat at either end.
  if (before != nullptr) {
    before->next_ = after;
  } else {
    head_ = after;
  }
  if (after != nullptr) {
    after->prev_ = before;
  } else {
    tail_ = before;
  }

  node->prev_ = nullptr;
  node->next_ = nullptr;
}
// Blocks the calling thread on the int32 cell at byte offset |addr| inside
// |array_buffer|. Returns heap->not_equal() immediately if the cell does not
// hold |value|; otherwise the thread sleeps until it is woken (heap->ok()),
// the relative timeout in milliseconds elapses (heap->timed_out()), or a
// stack-guard interrupt produces an exception (which is returned instead).
Object* FutexEmulation::Wait(Isolate* isolate,
                             Handle<JSArrayBuffer> array_buffer, size_t addr,
                             int32_t value, double rel_timeout_ms) {
  DCHECK(addr < NumberToSize(array_buffer->byte_length()));

  void* backing_store = array_buffer->backing_store();
  int32_t* p =
      reinterpret_cast<int32_t*>(static_cast<int8_t*>(backing_store) + addr);

  base::LockGuard<base::Mutex> lock_guard(mutex_.Pointer());

  // The value check and the enqueue below both happen under mutex_, so a
  // wake arriving between them cannot be lost.
  if (*p != value) {
    return isolate->heap()->not_equal();
  }

  FutexWaitListNode* node = isolate->futex_wait_list_node();

  node->backing_store_ = backing_store;
  node->wait_addr_ = addr;
  node->waiting_ = true;

  bool use_timeout = rel_timeout_ms != V8_INFINITY;

  base::TimeDelta rel_timeout;
  if (use_timeout) {
    // Convert to nanoseconds.
    double rel_timeout_ns = rel_timeout_ms *
                            base::Time::kNanosecondsPerMicrosecond *
                            base::Time::kMicrosecondsPerMillisecond;
    if (rel_timeout_ns >
        static_cast<double>(std::numeric_limits<int64_t>::max())) {
      // 2**63 nanoseconds is 292 years. Let's just treat anything greater as
      // infinite.
      use_timeout = false;
    } else {
      rel_timeout = base::TimeDelta::FromNanoseconds(
          static_cast<int64_t>(rel_timeout_ns));
    }
  }

  base::TimeTicks start_time = base::TimeTicks::Now();
  base::TimeTicks timeout_time = start_time + rel_timeout;
  base::TimeTicks current_time = start_time;

  wait_list_.Pointer()->AddNode(node);

  Object* result;

  while (true) {
    // Consume any interrupt latched by NotifyWake before deciding to sleep.
    bool interrupted = node->interrupted_;
    node->interrupted_ = false;

    // Unlock the mutex here to prevent deadlock from lock ordering between
    // mutex_ and mutexes locked by HandleInterrupts.
    mutex_.Pointer()->Unlock();

    // Because the mutex is unlocked, we have to be careful about not dropping
    // an interrupt. The notification can happen in three different places:
    // 1) Before Wait is called: the notification will be dropped, but
    //    interrupted_ will be set to 1. This will be checked below.
    // 2) After interrupted has been checked here, but before mutex_ is
    //    acquired: interrupted is checked again below, with mutex_ locked.
    //    Because the wakeup signal also acquires mutex_, we know it will not
    //    be able to notify until mutex_ is released below, when waiting on
    //    the condition variable.
    // 3) After the mutex is released in the call to WaitFor(): this
    //    notification will wake up the condition variable. node->waiting()
    //    will be false, so we'll loop and then check interrupts.
    if (interrupted) {
      Object* interrupt_object = isolate->stack_guard()->HandleInterrupts();
      if (interrupt_object->IsException(isolate)) {
        // Propagate the pending exception; re-lock so the cleanup after the
        // loop (RemoveNode) runs under mutex_.
        result = interrupt_object;
        mutex_.Pointer()->Lock();
        break;
      }
    }

    mutex_.Pointer()->Lock();

    if (node->interrupted_) {
      // An interrupt occured while the mutex_ was unlocked. Don't wait yet.
      continue;
    }

    if (!node->waiting_) {
      // A Wake call cleared waiting_ — report a successful wakeup.
      result = isolate->heap()->ok();
      break;
    }

    // No interrupts, now wait.
    if (use_timeout) {
      current_time = base::TimeTicks::Now();
      if (current_time >= timeout_time) {
        result = isolate->heap()->timed_out();
        break;
      }

      base::TimeDelta time_until_timeout = timeout_time - current_time;
      DCHECK(time_until_timeout.InMicroseconds() >= 0);
      bool wait_for_result =
          node->cond_.WaitFor(mutex_.Pointer(), time_until_timeout);
      USE(wait_for_result);
    } else {
      node->cond_.Wait(mutex_.Pointer());
    }

    // Spurious wakeup, interrupt or timeout.
  }

  // Still holding mutex_ here (re-locked on every break path above).
  wait_list_.Pointer()->RemoveNode(node);
  node->waiting_ = false;

  return result;
}
// Wakes up to |num_waiters_to_wake| threads waiting on the futex located at
// byte offset |addr| inside |array_buffer|'s backing store. Passing kWakeAll
// wakes every matching waiter. Returns the number of waiters actually woken,
// boxed as a Smi. |isolate| is unused here but kept for interface symmetry
// with Wait/NumWaitersForTesting.
Object* FutexEmulation::Wake(Isolate* isolate,
                             Handle<JSArrayBuffer> array_buffer, size_t addr,
                             uint32_t num_waiters_to_wake) {
  DCHECK(addr < NumberToSize(array_buffer->byte_length()));
  void* backing_store = array_buffer->backing_store();
  int woken = 0;
  // The wait list is shared between isolates; hold mutex_ for the whole
  // traversal so nodes cannot be added or removed underneath us.
  base::LockGuard<base::Mutex> lock_guard(mutex_.Pointer());
  for (FutexWaitListNode* current = wait_list_.Pointer()->head_;
       current && num_waiters_to_wake > 0; current = current->next_) {
    bool same_futex = (backing_store == current->backing_store_) &&
                      (addr == current->wait_addr_);
    if (!same_futex) continue;
    // Clear waiting_ before signalling so the woken thread observes the
    // state change once it reacquires mutex_.
    current->waiting_ = false;
    current->cond_.NotifyOne();
    // kWakeAll is a sentinel meaning "no limit"; don't count it down.
    if (num_waiters_to_wake != kWakeAll) {
      --num_waiters_to_wake;
    }
    ++woken;
  }
  return Smi::FromInt(woken);
}
// Testing hook: counts how many nodes are currently waiting on the futex at
// byte offset |addr| inside |array_buffer|'s backing store. Returns the count
// boxed as a Smi.
Object* FutexEmulation::NumWaitersForTesting(Isolate* isolate,
                                             Handle<JSArrayBuffer> array_buffer,
                                             size_t addr) {
  DCHECK(addr < NumberToSize(array_buffer->byte_length()));
  void* backing_store = array_buffer->backing_store();
  // Traversal of the shared wait list must happen under mutex_.
  base::LockGuard<base::Mutex> lock_guard(mutex_.Pointer());
  int waiters = 0;
  for (FutexWaitListNode* current = wait_list_.Pointer()->head_; current;
       current = current->next_) {
    bool same_futex = (backing_store == current->backing_store_) &&
                      (addr == current->wait_addr_);
    // Only nodes still marked waiting_ count; woken-but-not-yet-removed
    // nodes are excluded.
    if (same_futex && current->waiting_) {
      ++waiters;
    }
  }
  return Smi::FromInt(waiters);
}
} // namespace internal
} // namespace v8
| hoho/dosido | nodejs/deps/v8/src/futex-emulation.cc | C++ | mit | 7,269 |
// Copyright © Microsoft Corporation. All Rights Reserved.
// This code released under the terms of the
// Microsoft Public License (MS-PL, http://opensource.org/licenses/ms-pl.html.)
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Diagnostics;
using System.IO;
using System.Reflection;
using Microsoft.TeamFoundation.Migration.Shell.View;
using Microsoft.TeamFoundation.Migration.Toolkit;
namespace Microsoft.TeamFoundation.Migration.Shell.Extensibility
{
/// <summary>
/// This class discovers and loads Plugins. It also exposes information about
/// the loaded Plugins, and exposes an interface for invoking Plugin commands.
/// </summary>
public class PluginManager : IPluginManager
{
    #region Fields
    private readonly PluginHandler[] pluginHandlers;
    private readonly PluginContextCollection pluginContexts;
    #endregion

    #region Constructors
    /// <summary>
    /// Initializes a new instance of the <see cref="PluginManager"/> class.
    /// </summary>
    /// <remarks>
    /// Plugin probing directories are read from the application configuration file.
    /// </remarks>
    public PluginManager () : this (PluginManager.GetPluginDirectoriesFromConfig ())
    {
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="PluginManager"/> class.
    /// </summary>
    /// <param name="probingDirectories">
    /// Specifies directories to probe for Plugins.
    /// </param>
    public PluginManager (params DirectoryInfo[] probingDirectories)
    {
        this.pluginHandlers = PluginManager.DiscoverPlugins (probingDirectories);
        this.pluginContexts = new PluginContextCollection (this);

        // Re-raise each handler's load/unload notifications as this manager's
        // PluginLoaded / PluginUnloaded events.
        foreach (PluginHandler pluginHandler in this.pluginHandlers)
        {
            pluginHandler.Loaded += this.OnPluginLoaded;
            pluginHandler.Unloaded += this.OnPluginUnloaded;
        }
    }

    /// <summary>
    /// Gets one migration source view from every discovered Plugin.
    /// </summary>
    /// <returns>The migration source views of all discovered Plugins.</returns>
    public IEnumerable<IMigrationSourceView> GetMigrationSourceViews()
    {
        List<IMigrationSourceView> migrationSourceViews = new List<IMigrationSourceView>();
        foreach (PluginHandler pluginHandler in pluginHandlers)
        {
            migrationSourceViews.Add(pluginHandler.GetMigrationSourceView());
        }
        return migrationSourceViews;
    }

    /// <summary>
    /// Gets the conflict type views exposed by the Plugin with the given provider id.
    /// </summary>
    /// <param name="providerId">The id of the provider (Plugin) to query.</param>
    /// <returns>
    /// The conflict type views, or null if no matching provider is found or the
    /// provider does not implement conflict type views.
    /// </returns>
    public IEnumerable<IConflictTypeView> GetConflictTypes(Guid providerId)
    {
        try
        {
            foreach (PluginHandler pluginHandler in pluginHandlers)
            {
                if (pluginHandler.Descriptor.Id.Equals(providerId))
                {
                    return pluginHandler.GetConflictTypeViews();
                }
            }
        }
        catch (NotImplementedException)
        {
            // Conflict type views are an optional Plugin feature; treat a
            // missing implementation the same as "no views available".
        }
        return null;
    }

    /// <summary>
    /// Gets the filter string extension of the Plugin with the given provider id.
    /// </summary>
    /// <param name="providerId">The id of the provider (Plugin) to query.</param>
    /// <returns>
    /// The filter string extension, or null if no matching provider is found or
    /// querying the Plugin failed.
    /// </returns>
    public ExecuteFilterStringExtension GetFilterStringExtension(Guid providerId)
    {
        try
        {
            foreach (PluginHandler pluginhandler in pluginHandlers)
            {
                if (pluginhandler.Descriptor.Id.Equals(providerId))
                {
                    return pluginhandler.FilterStringExtension;
                }
            }
        }
        catch (Exception exception)
        {
            // Best effort: a faulty Plugin must not bring down the host.
            // Trace the failure instead of swallowing it silently.
            Utilities.DefaultTraceSource.TraceEvent (TraceEventType.Warning, 0,
                "A failure occurred while querying the filter string extension of provider {0}: {1}",
                providerId, exception.ToString ());
        }
        return null;
    }
    #endregion

    #region Properties
    /// <summary>
    /// Gets the Context Provider collection. Context Providers can freely be added and removed at runtime.
    /// </summary>
    public ICollection<object> PluginContexts
    {
        get
        {
            return this.pluginContexts;
        }
    }

    /// <summary>
    /// Gets information about all Plugins that have been loaded by the Plugin Manager.
    /// </summary>
    public IEnumerable<PluginDescriptor> LoadedPlugins
    {
        get
        {
            foreach (PluginHandler pluginHandler in this.LoadedPluginHandlers)
            {
                yield return pluginHandler.Descriptor;
            }
        }
    }

    // Filters the discovered handlers down to the ones that are currently loaded.
    private IEnumerable<PluginHandler> LoadedPluginHandlers
    {
        get
        {
            foreach (PluginHandler pluginHandler in this.pluginHandlers)
            {
                if (pluginHandler.IsLoaded)
                {
                    yield return pluginHandler;
                }
            }
        }
    }
    #endregion

    #region Events
    /// <summary>
    /// Occurs when a Plugin is loaded.
    /// </summary>
    public event EventHandler<PluginLoadedEventArgs> PluginLoaded;

    /// <summary>
    /// Occurs when a Plugin is unloaded.
    /// </summary>
    public event EventHandler<PluginLoadedEventArgs> PluginUnloaded;
    #endregion

    #region Private Methods
    // Called when a context object is added to PluginContexts: notify every
    // handler that declares support for the context's type.
    // NOTE(review): this probes all handlers while OnPluginContextRemoved only
    // probes loaded ones — presumably entering a context may load a Plugin;
    // confirm against PluginHandler.OnContextEnter.
    private void OnPluginContextAdded (object context)
    {
        foreach (PluginHandler pluginHandler in this.pluginHandlers)
        {
            if (pluginHandler.SupportsContext (context.GetType ()))
            {
                pluginHandler.OnContextEnter (context);
            }
        }
    }

    // Called when a context object is removed from PluginContexts: notify every
    // loaded handler that supports the context's type.
    private void OnPluginContextRemoved (object context)
    {
        foreach (PluginHandler pluginHandler in this.LoadedPluginHandlers)
        {
            if (pluginHandler.SupportsContext (context.GetType ()))
            {
                pluginHandler.OnContextLeave (context);
            }
        }
    }

    // Reads the plugin probing directories from the application settings,
    // expanding environment variables and resolving relative paths against
    // the application startup path.
    private static DirectoryInfo[] GetPluginDirectoriesFromConfig ()
    {
        List<DirectoryInfo> pluginDirectories = new List<DirectoryInfo> (Properties.Settings.Default.PluginDirectories.Count);
        foreach (string pluginDirectory in Properties.Settings.Default.PluginDirectories)
        {
            // Expand environment variables
            string resolvedPluginDirectory = Environment.ExpandEnvironmentVariables (pluginDirectory);

            // If the directory is not rooted, make it relative to the application path
            if (!Path.IsPathRooted (resolvedPluginDirectory))
            {
                resolvedPluginDirectory = Path.Combine (System.Windows.Forms.Application.StartupPath, resolvedPluginDirectory);
            }

            // Add the directory to the running list
            pluginDirectories.Add (new DirectoryInfo (resolvedPluginDirectory));
        }
        return pluginDirectories.ToArray ();
    }

    // Scans every *.dll in the probing directories and builds a PluginHandler
    // for each public, concrete IPlugin implementation found. Assemblies that
    // fail to load are traced and skipped so one bad dll cannot abort discovery.
    private static PluginHandler[] DiscoverPlugins (DirectoryInfo[] probingDirectories)
    {
        // Initialize a list that will contain all plugin types discovered
        List<PluginHandler> pluginHandlers = new List<PluginHandler> ();

        if (probingDirectories != null)
        {
            // Iterate over the probing directories and look for plugins
            foreach (DirectoryInfo directory in probingDirectories)
            {
                if (directory.Exists)
                {
                    // Try to load plugins from each dll
                    foreach (FileInfo file in directory.GetFiles ("*.dll"))
                    {
                        try
                        {
                            // Load the dll into an assembly
                            Assembly assembly = Assembly.LoadFrom (file.FullName);

                            // Iterate over all types contained in the assembly
                            foreach (Type type in assembly.GetTypes ())
                            {
                                // Only consider public, concrete types that implement IPlugin
                                if (type.IsPublic && !type.IsAbstract && typeof (IPlugin).IsAssignableFrom (type))
                                {
                                    PluginHandler pluginHandler = PluginHandler.FromType (type);
                                    if (pluginHandler != null)
                                    {
                                        pluginHandlers.Add (pluginHandler);
                                    }
                                }
                            }
                        }
                        catch (Exception exception)
                        {
                            Utilities.DefaultTraceSource.TraceEvent (TraceEventType.Error, 0, "A failure occurred while trying to load the {0} Plugin: {1}{2}", file.FullName, Environment.NewLine, exception.ToString ());
                        }
                    }
                }
            }
        }

        // Return the list of plugin types discovered
        return pluginHandlers.ToArray ();
    }

    // Relays a handler's Loaded event as this manager's PluginLoaded event.
    private void OnPluginLoaded (object sender, EventArgs e)
    {
        PluginHandler pluginHandler = (PluginHandler)sender;

        // Copy the delegate reference before testing/invoking so a subscriber
        // removed concurrently cannot cause a NullReferenceException.
        EventHandler<PluginLoadedEventArgs> handler = this.PluginLoaded;
        if (handler != null)
        {
            handler (this, new PluginLoadedEventArgs (pluginHandler.Descriptor));
        }
    }

    // Relays a handler's Unloaded event as this manager's PluginUnloaded event.
    private void OnPluginUnloaded (object sender, EventArgs e)
    {
        PluginHandler pluginHandler = (PluginHandler)sender;

        // Copy the delegate reference before testing/invoking (see OnPluginLoaded).
        EventHandler<PluginLoadedEventArgs> handler = this.PluginUnloaded;
        if (handler != null)
        {
            handler (this, new PluginLoadedEventArgs (pluginHandler.Descriptor));
        }
    }
    #endregion

    #region Classes
    /// <summary>
    /// Collection wrapper that notifies the owning <see cref="PluginManager"/>
    /// whenever a context object is added, removed or replaced, so Plugins can
    /// react to contexts entering and leaving.
    /// </summary>
    private class PluginContextCollection : Collection<object>
    {
        #region Fields
        private readonly PluginManager pluginManager;
        #endregion

        #region Constructors
        public PluginContextCollection (PluginManager pluginManager)
        {
            this.pluginManager = pluginManager;
        }
        #endregion

        #region Protected Methods
        protected override void InsertItem (int index, object context)
        {
            base.InsertItem (index, context);
            this.pluginManager.OnPluginContextAdded (context);
        }

        protected override void RemoveItem (int index)
        {
            // Capture the context before removal so the manager can notify
            // Plugins that the context is leaving.
            object context = this[index];
            base.RemoveItem (index);
            this.pluginManager.OnPluginContextRemoved (context);
        }

        protected override void SetItem (int index, object newContext)
        {
            object oldContext = this[index];
            base.SetItem (index, newContext);

            // A replacement is a remove of the old context plus an add of the new one.
            this.pluginManager.OnPluginContextRemoved (oldContext);
            this.pluginManager.OnPluginContextAdded (newContext);
        }

        protected override void ClearItems ()
        {
            // Notify for every context before the underlying list is emptied.
            foreach (object context in this)
            {
                this.pluginManager.OnPluginContextRemoved (context);
            }
            base.ClearItems ();
        }
        #endregion
    }
    #endregion
}
}
| adamdriscoll/TfsIntegrationPlatform | IntegrationPlatform/Shell/EditorFoundation/Source/Extensibility/PluginManager.cs | C# | mit | 11,472 |
/*
** delay_deny
**
** This plugin delays all pre-DATA 'deny' results until the recipients are sent
** and all post-DATA commands until all hook_data_post plugins have run.
** This allows relays and authenticated users to bypass pre-DATA rejections.
*/
// Intercepts DENY/DENYSOFT results from other plugins. Pre-DATA denials are
// recorded on the connection/transaction notes and converted to OK, so they
// can be re-applied later (or bypassed for relays/authenticated users) in
// hook_rcpt_ok. Post-DATA denials pass through untouched.
exports.hook_deny = function (next, connection, params) {
    /* params
    ** [0] = plugin return value (DENY or DENYSOFT)
    ** [1] = plugin return message
    */
    var pi_name = params[2];
    var pi_function = params[3];
    // var pi_params = params[4];
    var pi_hook = params[5];
    var plugin = this;
    var transaction = connection.transaction;
    // Don't delay ourselves...
    if (pi_name == 'delay_deny') return next();
    // Load config
    var cfg = this.config.get('delay_deny.ini');
    var skip;
    var included;
    // included_plugins takes precedence; only one of the two lists is consulted.
    if (cfg.main.included_plugins) {
        included = cfg.main.included_plugins.split(/[;, ]+/);
    } else if (cfg.main.excluded_plugins) {
        skip = cfg.main.excluded_plugins.split(/[;, ]+/);
    }
    // 'included' mode: only delay deny plugins in the included list
    if (included && included.length) {
        // Entries may match as <plugin>, <plugin>:<hook> or
        // <plugin>:<hook>:<function>; no match means the denial applies now.
        if (included.indexOf(pi_name) === -1 &&
            included.indexOf(pi_name + ':' + pi_hook) === -1 &&
            included.indexOf(pi_name + ':' + pi_hook + ':' + pi_function) === -1) {
            return next();
        }
    } else if (skip && skip.length) { // 'excluded' mode: delay deny everything except in skip list
        // Skip by <plugin name>
        if (skip.indexOf(pi_name) !== -1) {
            connection.logdebug(plugin, 'not delaying excluded plugin: ' + pi_name);
            return next();
        }
        // Skip by <plugin name>:<hook>
        if (skip.indexOf(pi_name + ':' + pi_hook) !== -1) {
            connection.logdebug(plugin, 'not delaying excluded hook: ' + pi_hook +
                ' in plugin: ' + pi_name);
            return next();
        }
        // Skip by <plugin name>:<hook>:<function name>
        if (skip.indexOf(pi_name + ':' + pi_hook + ':' + pi_function) !== -1) {
            connection.logdebug(plugin, 'not delaying excluded function: ' + pi_function +
                ' on hook: ' + pi_hook + ' in plugin: ' + pi_name);
            return next();
        }
    }
    switch (pi_hook) {
        // Pre-DATA connection delays: remember the denial on the connection
        // notes (it outlives individual transactions) and report OK so the
        // session continues. delay_deny_pre_fail records which plugins failed,
        // for the X-Haraka-Fail-Pre header added in hook_data.
        case 'lookup_rdns':
        case 'connect':
        case 'ehlo':
        case 'helo':
            if (!connection.notes.delay_deny_pre) {
                connection.notes.delay_deny_pre = [];
            }
            connection.notes.delay_deny_pre.push(params);
            if (!connection.notes.delay_deny_pre_fail) {
                connection.notes.delay_deny_pre_fail = {};
            }
            connection.notes.delay_deny_pre_fail[pi_name] = 1;
            return next(OK);
        // Pre-DATA transaction delays: same bookkeeping, but scoped to the
        // current transaction's notes.
        case 'mail':
        case 'rcpt':
        case 'rcpt_ok':
            if (!transaction.notes.delay_deny_pre) {
                transaction.notes.delay_deny_pre = [];
            }
            transaction.notes.delay_deny_pre.push(params);
            if (!transaction.notes.delay_deny_pre_fail) {
                transaction.notes.delay_deny_pre_fail = {};
            }
            transaction.notes.delay_deny_pre_fail[pi_name] = 1;
            return next(OK);
        // Post-DATA delays
        case 'data':
        case 'data_post':
            // fall through
        default:
            // No delays
            return next();
    }
};
exports.hook_rcpt_ok = function (next, connection, rcpt) {
var plugin = this;
var transaction = connection.transaction;
// Bypass all pre-DATA deny for AUTH/RELAY
if (connection.relaying) {
connection.loginfo(plugin, 'bypassing all pre-DATA deny: AUTH/RELAY');
return next();
}
// Apply any delayed rejections
// Check connection level pre-DATA rejections first
if (connection.notes.delay_deny_pre) {
for (let i=0; i<connection.notes.delay_deny_pre.length; i++) {
let params = connection.notes.delay_deny_pre[i];
return next(params[0], params[1]);
}
}
// Then check transaction level pre-DATA
if (transaction.notes.delay_deny_pre) {
for (let i=0; i<transaction.notes.delay_deny_pre.length; i++) {
let params = transaction.notes.delay_deny_pre[i];
// Remove rejection from the array if it was on the rcpt hooks
if (params[5] === 'rcpt' || params[5] === 'rcpt_ok') {
transaction.notes.delay_deny_pre.splice(i, 1);
}
return next(params[0], params[1]);
}
}
return next();
};
exports.hook_data = function (next, connection) {
var transaction = connection.transaction;
// Add a header showing all pre-DATA rejections
var fails = [];
if (connection.notes.delay_deny_pre_fail) {
fails.push.apply(Object.keys(connection.notes.delay_deny_pre_fail));
}
if (transaction.notes.delay_deny_pre_fail) {
fails.push.apply(Object.keys(transaction.notes.delay_deny_pre_fail));
}
if (fails.length) transaction.add_header('X-Haraka-Fail-Pre', fails.join(' '));
return next();
}
| Synchro/Haraka | plugins/delay_deny.js | JavaScript | mit | 5,266 |
module Fastlane
  module Actions
    # fastlane action that greps the code base for a given text/regex and
    # raises a user error when any match is found (e.g. leftover debug
    # statements or "// TODO" markers).
    class EnsureNoDebugCodeAction < Action
      # Builds a recursive grep command from the action parameters and runs it.
      # Under tests (Helper.test?) the assembled command string is returned
      # without being executed.
      def self.run(params)
        # -R: recursive, -E: extended regex; search root is made absolute.
        command = "grep -RE '#{params[:text]}' '#{File.absolute_path(params[:path])}'"

        # Merge the single :extension and the :extensions array into one list,
        # stripping leading dots so they can be used in include globs.
        extensions = []
        extensions << params[:extension] unless params[:extension].nil?

        if params[:extensions]
          params[:extensions].each do |extension|
            extension.delete!('.') if extension.include?(".")
            extensions << extension
          end
        end

        # Restrict grep to the given extensions. The brace form relies on
        # shell brace expansion when the command is run via backticks below.
        if extensions.count > 1
          command << " --include=\\*.{#{extensions.join(',')}}"
        elsif extensions.count > 0
          command << " --include=\\*.#{extensions.join(',')}"
        end

        # NOTE(review): :exclude is interpolated without shellescape, unlike
        # :exclude_dirs below — confirm callers only pass trusted patterns.
        command << " --exclude #{params[:exclude]}" if params[:exclude]

        if params[:exclude_dirs]
          params[:exclude_dirs].each do |dir|
            command << " --exclude-dir #{dir.shellescape}"
          end
        end

        return command if Helper.test?

        UI.important(command)
        results = `#{command}` # we don't use `sh` as the return code of grep is wrong for some reason

        # Example Output
        # ./fastlane.gemspec: spec.add_development_dependency 'my_word'
        # ./Gemfile.lock: my_word (0.10.1)
        found = []
        results.split("\n").each do |current_raw|
          found << current_raw.strip
        end

        # Any match at all counts as debug code and aborts the lane.
        UI.user_error!("Found debug code '#{params[:text]}': \n\n#{found.join("\n")}") if found.count > 0
        UI.message("No debug code found in code base 🐛")
      end

      #####################################################
      # @!group Documentation
      #####################################################

      def self.description
        "Ensures the given text is nowhere in the code base"
      end

      def self.details
        [
          "You don't want any debug code to slip into production.",
          "This can be used to check if there is any debug code still in your codebase or if you have things like `// TO DO` or similar."
        ].join("\n")
      end

      # Declares the action's parameters: the forbidden text, the search path,
      # optional extension filters and optional exclusions.
      def self.available_options
        [
          FastlaneCore::ConfigItem.new(key: :text,
                                       env_name: "FL_ENSURE_NO_DEBUG_CODE_TEXT",
                                       description: "The text that must not be in the code base"),
          FastlaneCore::ConfigItem.new(key: :path,
                                       env_name: "FL_ENSURE_NO_DEBUG_CODE_PATH",
                                       description: "The directory containing all the source files",
                                       default_value: ".",
                                       verify_block: proc do |value|
                                         UI.user_error!("Couldn't find the folder at '#{File.absolute_path(value)}'") unless File.directory?(value)
                                       end),
          FastlaneCore::ConfigItem.new(key: :extension,
                                       env_name: "FL_ENSURE_NO_DEBUG_CODE_EXTENSION",
                                       description: "The extension that should be searched for",
                                       optional: true,
                                       verify_block: proc do |value|
                                         value.delete!('.') if value.include?(".")
                                       end),
          FastlaneCore::ConfigItem.new(key: :extensions,
                                       env_name: "FL_ENSURE_NO_DEBUG_CODE_EXTENSIONS",
                                       description: "An array of file extensions that should be searched for",
                                       optional: true,
                                       type: Array),
          FastlaneCore::ConfigItem.new(key: :exclude,
                                       env_name: "FL_ENSURE_NO_DEBUG_CODE_EXCLUDE",
                                       description: "Exclude a certain pattern from the search",
                                       optional: true),
          FastlaneCore::ConfigItem.new(key: :exclude_dirs,
                                       env_name: "FL_ENSURE_NO_DEBUG_CODE_EXCLUDE_DIRS",
                                       description: "An array of dirs that should not be included in the search",
                                       optional: true,
                                       type: Array)
        ]
      end

      def self.output
        []
      end

      def self.authors
        ["KrauseFx"]
      end

      def self.example_code
        [
          'ensure_no_debug_code(text: "// TODO")',
          'ensure_no_debug_code(text: "Log.v",
                      extension: "java")',
          'ensure_no_debug_code(text: "NSLog",
                           path: "./lib",
                      extension: "m")',
          'ensure_no_debug_code(text: "(^#define DEBUG|NSLog)",
                           path: "./lib",
                      extension: "m")',
          'ensure_no_debug_code(text: "<<<<<<",
                     extensions: ["m", "swift", "java"])'
        ]
      end

      def self.category
        :misc
      end

      def self.is_supported?(platform)
        true
      end
    end
  end
end
| fastlane/fastlane | fastlane/lib/fastlane/actions/ensure_no_debug_code.rb | Ruby | mit | 5,274 |
# View helpers governing visibility/editability of remark requests and results.
module ResultsHelper
  # True when the remark result either has not been marked yet or has already
  # been released to students.
  def remark_result_unsubmitted_or_released(remark_result)
    remark_result.marking_state == Result::MARKING_STATES[:unmarked] ||
      remark_result.released_to_students
  end

  # The remark-request tab is shown in the student pane when the assignment
  # allows remarks and either the remark result has been released (visible to
  # everyone) or the current user is a student.
  def can_show_remark_request_tab_in_student_pane(assignment, current_user, submission)
    return false unless assignment.allow_remarks

    # Fetch once instead of querying the submission twice.
    remark_result = submission.get_remark_result
    return true if remark_result && remark_result.released_to_students

    current_user.student?
  end

  # A student may still edit the remark request while no remark result exists
  # yet, or while the existing one has not been marked.
  def student_can_edit_remark_request(submission)
    remark_result = submission.get_remark_result
    !remark_result ||
      remark_result.marking_state == Result::MARKING_STATES[:unmarked]
  end

  # Markers see the remark-request tab exactly when the student can no longer
  # edit the request.
  def can_show_remark_request_tab_in_marker_pane(submission)
    !student_can_edit_remark_request(submission)
  end
end
| Lysette/Markus | app/helpers/results_helper.rb | Ruby | mit | 838 |
<?php
/*
* This file is part of the puli/repository package.
*
* (c) Bernhard Schussek <bschussek@gmail.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace Puli\Repository;
use Puli\Repository\Api\Resource\FilesystemResource;
use Puli\Repository\Api\ResourceCollection;
use Puli\Repository\Api\UnsupportedResourceException;
use Puli\Repository\Resource\DirectoryResource;
use Puli\Repository\Resource\FileResource;
use Puli\Repository\Resource\GenericResource;
use RuntimeException;
use Webmozart\KeyValueStore\Api\CountableStore;
use Webmozart\KeyValueStore\Api\KeyValueStore;
use Webmozart\KeyValueStore\Api\SortableStore;
use Webmozart\KeyValueStore\Decorator\CountableDecorator;
use Webmozart\KeyValueStore\Decorator\SortableDecorator;
use Webmozart\PathUtil\Path;
/**
* Abstract base for Path mapping repositories.
*
* @since 1.0
*
* @author Bernhard Schussek <bschussek@gmail.com>
* @author Titouan Galopin <galopintitouan@gmail.com>
*/
abstract class AbstractPathMappingRepository extends AbstractRepository
{
    /**
     * The backing key-value store; keys are repository paths.
     *
     * @var KeyValueStore
     */
    protected $store;

    /**
     * Creates a new repository.
     *
     * @param KeyValueStore $store The store of all the paths.
     */
    public function __construct(KeyValueStore $store)
    {
        $this->store = $store;

        // Every repository needs the root entry ('/') before first use.
        $this->createRoot();
    }

    /**
     * Add the resource (internal method after checks of add()).
     *
     * @param string             $path
     * @param FilesystemResource $resource
     */
    abstract protected function addResource($path, FilesystemResource $resource);

    /**
     * {@inheritdoc}
     */
    public function add($path, $resource)
    {
        $path = $this->sanitizePath($path);

        // A collection is mounted as children of $path, one entry per member.
        if ($resource instanceof ResourceCollection) {
            $this->ensureDirectoryExists($path);

            foreach ($resource as $child) {
                $this->addResource($path.'/'.$child->getName(), $child);
            }

            // Sort once after the batch insert, not per child.
            $this->sortStore();

            return;
        }

        if ($resource instanceof FilesystemResource) {
            $this->ensureDirectoryExists(Path::getDirectory($path));

            $this->addResource($path, $resource);

            $this->sortStore();

            return;
        }

        throw new UnsupportedResourceException(sprintf(
            'The passed resource must be a FilesystemResource or a ResourceCollection. Got: %s',
            is_object($resource) ? get_class($resource) : gettype($resource)
        ));
    }

    /**
     * {@inheritdoc}
     */
    public function clear()
    {
        // Subtract root
        $removed = $this->countStore() - 1;

        // The root entry is recreated immediately so the repository stays usable.
        $this->store->clear();
        $this->createRoot();

        return $removed;
    }

    /**
     * Recursively creates a directory for a path.
     *
     * @param string $path A directory path.
     */
    protected function ensureDirectoryExists($path)
    {
        if ($this->store->exists($path)) {
            return;
        }

        // Recursively initialize parent directories
        if ('/' !== $path) {
            $this->ensureDirectoryExists(Path::getDirectory($path));
        }

        // Directories are represented as keys with a null payload.
        $this->store->set($path, null);
    }

    /**
     * Create the repository root.
     */
    protected function createRoot()
    {
        if ($this->store->exists('/')) {
            return;
        }

        $this->store->set('/', null);
    }

    /**
     * Count the number of elements in the store.
     *
     * @return int
     */
    protected function countStore()
    {
        // Lazily wrap the store so counting works even for non-countable stores.
        if (!$this->store instanceof CountableStore) {
            $this->store = new CountableDecorator($this->store);
        }

        return $this->store->count();
    }

    /**
     * Sort the store by keys.
     */
    protected function sortStore()
    {
        // Lazily wrap the store so sorting works even for non-sortable stores.
        if (!$this->store instanceof SortableStore) {
            $this->store = new SortableDecorator($this->store);
        }

        $this->store->sort();
    }

    /**
     * Create a filesystem or generic resource.
     *
     * @param string      $filesystemPath The filesystem path; a generic
     *                                    resource is created if it is empty
     *                                    or does not exist.
     * @param string|null $path           The repository path to attach to.
     *
     * @return DirectoryResource|FileResource|GenericResource
     */
    protected function createResource($filesystemPath, $path = null)
    {
        if ($filesystemPath && file_exists($filesystemPath)) {
            return $this->createFilesystemResource($filesystemPath, $path);
        }

        return $this->createVirtualResource($path);
    }

    /**
     * Create a resource using its filesystem path.
     *
     * If the filesystem path is a directory, a DirectoryResource will be created.
     * If the filesystem path is a file, a FileResource will be created.
     * If the filesystem does not exists, a GenericResource will be created.
     *
     * @param string $filesystemPath The filesystem path.
     * @param string $path           The repository path.
     *
     * @return DirectoryResource|FileResource The created resource.
     *
     * @throws RuntimeException If the file / directory does not exist.
     */
    protected function createFilesystemResource($filesystemPath, $path = null)
    {
        $resource = null;

        if (is_dir($filesystemPath)) {
            $resource = new DirectoryResource($filesystemPath);
        } elseif (is_file($filesystemPath)) {
            $resource = new FileResource($filesystemPath);
        }

        if ($resource) {
            $resource->attachTo($this, $path);

            return $resource;
        }

        throw new RuntimeException(sprintf(
            'Trying to create a FilesystemResource on a non-existing file or directory "%s"',
            $filesystemPath
        ));
    }

    /**
     * Create a virtual (generic) resource attached to this repository.
     *
     * @param string|null $path The repository path to attach to.
     *
     * @return GenericResource
     */
    protected function createVirtualResource($path = null)
    {
        $resource = new GenericResource();
        $resource->attachTo($this, $path);

        return $resource;
    }
}
| stof/repository | src/AbstractPathMappingRepository.php | PHP | mit | 6,028 |
#include <assert.h>
#include <stdio.h>
#include <ulib/os_atomic_intel64.h>
// Smoke test for the ulib Intel64 atomic primitives. Each assert both
// exercises an operation and pins down its observed return convention.
int main()
{
    // Full memory barrier: just has to compile and execute.
    atomic_barrier();
    uint64_t a = 0;
    // cmpswp returns the value previously held at the address; the swap only
    // happens when that old value equals the expected argument.
    assert(atomic_cmpswp64(&a, 1, 2) == 0);  // expected 1 != 0 -> no swap
    assert(a == 0);
    assert(atomic_cmpswp64(&a, 0, 2) == 0);  // expected 0 == 0 -> a becomes 2
    assert(a == 2);
    // 16-bit CAS operates on the low 16 bits of a (little-endian x86-64).
    assert(atomic_cmpswp16(&a, 0, 2) == 2);  // low word is 2, expected 0 -> fail
    assert(atomic_cmpswp16(&a, 2, 0) == 2);  // succeeds, low word -> 0
    assert(atomic_cmpswp16(&a, 0, 2) == 0);  // succeeds, low word -> 2
    assert(a == 2);
    // fetchadd returns the value before the addition.
    assert(atomic_fetchadd64(&a, 1) == 2);
    assert(a == 3);
    assert(atomic_fetchadd64(&a, -1) == 3);
    assert(a == 2);
    // fetchstore (exchange) returns the previous value.
    assert(atomic_fetchstore64(&a, 5) == 2);
    assert(a == 5);
    // test_and_set_bit returns -1 when the bit was already set, 0 otherwise.
    assert(atomic_test_and_set_bit64(&a, 0) == -1);  // bit 0 of 5 already set
    assert(a == 5);
    assert(atomic_test_and_set_bit64(&a, 1) == 0);   // bit 1 was clear
    assert(a == 7);
    // Atomic and/or, 64-bit and on the low 8 bits.
    atomic_and64(&a, ~7ul);
    assert(a == 0);
    atomic_or64(&a, (1ull << 63));
    assert(a == (1ull << 63));
    assert(atomic_test_and_set_bit64(&a, 63) == -1);  // highest bit already set
    atomic_or8(&a, 1);
    assert(a == ((1ull << 63) | 1));
    atomic_and8(&a, (int8_t)~1u);  // clears bit 0 without touching high bits
    assert(a == (1ull << 63));
    atomic_and64(&a, ~(1ull << 63));
    assert(a == 0);
    // Plain increment/decrement/add; adding -1 to 0 wraps to all-ones.
    atomic_inc64(&a);
    assert(a == 1);
    atomic_dec64(&a);
    assert(a == 0);
    atomic_add64(&a, -1);
    assert(a == (uint64_t)-1);
    // Spin-wait hint (pause); no observable state change expected.
    atomic_cpu_relax();
    printf("passed\n");
    return 0;
}
| abhishek1015/schema_design | ulib/test/atomic.cpp | C++ | mit | 1,184 |
/*
[auto_generated]
boost/numeric/odeint/algebra/range_algebra.hpp
[begin_description]
Default algebra, which works with the most state types, like vector< double >, boost::array< double >, boost::range.
Internally is uses boost::range to obtain the begin and end iterator of the according sequence.
[end_description]
Copyright 2009-2011 Karsten Ahnert
Copyright 2009-2011 Mario Mulansky
Distributed under the Boost Software License, Version 1.0.
(See accompanying file LICENSE_1_0.txt or
copy at http://www.boost.org/LICENSE_1_0.txt)
*/
#ifndef BOOST_NUMERIC_ODEINT_ALGEBRA_RANGE_ALGEBRA_HPP_INCLUDED
#define BOOST_NUMERIC_ODEINT_ALGEBRA_RANGE_ALGEBRA_HPP_INCLUDED
#include <boost/range.hpp>
#include <boost/mpl/size_t.hpp>
#include <boost/numeric/odeint/algebra/detail/macros.hpp>
#include <boost/numeric/odeint/algebra/detail/for_each.hpp>
#include <boost/numeric/odeint/algebra/detail/norm_inf.hpp>
#include <boost/numeric/odeint/algebra/norm_result_type.hpp>
namespace boost {
namespace numeric {
namespace odeint {
struct range_algebra
{
template< class S1 , class Op >
static void for_each1( S1 &s1 , Op op )
{
detail::for_each1( boost::begin( s1 ) , boost::end( s1 ) ,
op );
}
template< class S1 , class S2 , class Op >
static void for_each2( S1 &s1 , S2 &s2 , Op op )
{
detail::for_each2( boost::begin( s1 ) , boost::end( s1 ) ,
boost::begin( s2 ) , op );
}
template< class S1 , class S2 , class S3 , class Op >
static void for_each3( S1 &s1 , S2 &s2 , S3 &s3 , Op op )
{
detail::for_each3( boost::begin( s1 ) , boost::end( s1 ) , boost::begin( s2 ) , boost::begin( s3 ) , op );
}
template< class S1 , class S2 , class S3 , class S4 , class Op >
static void for_each4( S1 &s1 , S2 &s2 , S3 &s3 , S4 &s4 , Op op )
{
detail::for_each4( boost::begin( s1 ) , boost::end( s1 ) , boost::begin( s2 ) , boost::begin( s3 ) , boost::begin( s4 ) , op );
}
template< class S1 , class S2 , class S3 , class S4 , class S5 , class Op >
static void for_each5( S1 &s1 , S2 &s2 , S3 &s3 , S4 &s4 , S5 &s5 , Op op )
{
detail::for_each5( boost::begin( s1 ) , boost::end( s1 ) , boost::begin( s2 ) , boost::begin( s3 ) , boost::begin( s4 ) , boost::begin( s5 ) , op );
}
template< class S1 , class S2 , class S3 , class S4 , class S5 , class S6 , class Op >
static void for_each6( S1 &s1 , S2 &s2 , S3 &s3 , S4 &s4 , S5 &s5 , S6 &s6 , Op op )
{
detail::for_each6( boost::begin( s1 ) , boost::end( s1 ) , boost::begin( s2 ) , boost::begin( s3 ) , boost::begin( s4 ) , boost::begin( s5 ) , boost::begin( s6 ) , op );
}
template< class S1 , class S2 , class S3 , class S4 , class S5 , class S6 ,class S7 , class Op >
static void for_each7( S1 &s1 , S2 &s2 , S3 &s3 , S4 &s4 , S5 &s5 , S6 &s6 , S7 &s7 , Op op )
{
detail::for_each7( boost::begin( s1 ) , boost::end( s1 ) , boost::begin( s2 ) , boost::begin( s3 ) , boost::begin( s4 ) , boost::begin( s5 ) , boost::begin( s6 ) , boost::begin( s7 ) , op );
}
template< class S1 , class S2 , class S3 , class S4 , class S5 , class S6 ,class S7 , class S8 , class Op >
static void for_each8( S1 &s1 , S2 &s2 , S3 &s3 , S4 &s4 , S5 &s5 , S6 &s6 , S7 &s7 , S8 &s8 , Op op )
{
detail::for_each8( boost::begin( s1 ) , boost::end( s1 ) , boost::begin( s2 ) , boost::begin( s3 ) , boost::begin( s4 ) , boost::begin( s5 ) , boost::begin( s6 ) , boost::begin( s7 ) , boost::begin( s8 ) , op );
}
template< class S1 , class S2 , class S3 , class S4 , class S5 , class S6 ,class S7 , class S8 , class S9 , class Op >
static void for_each9( S1 &s1 , S2 &s2 , S3 &s3 , S4 &s4 , S5 &s5 , S6 &s6 , S7 &s7 , S8 &s8 , S9 &s9 , Op op )
{
detail::for_each9( boost::begin( s1 ) , boost::end( s1 ) , boost::begin( s2 ) , boost::begin( s3 ) , boost::begin( s4 ) , boost::begin( s5 ) , boost::begin( s6 ) , boost::begin( s7 ) , boost::begin( s8 ) , boost::begin( s9 ) , op );
}
template< class S1 , class S2 , class S3 , class S4 , class S5 , class S6 ,class S7 , class S8 , class S9 , class S10 , class Op >
static void for_each10( S1 &s1 , S2 &s2 , S3 &s3 , S4 &s4 , S5 &s5 , S6 &s6 , S7 &s7 , S8 &s8 , S9 &s9 , S10 &s10 , Op op )
{
detail::for_each10( boost::begin( s1 ) , boost::end( s1 ) , boost::begin( s2 ) , boost::begin( s3 ) , boost::begin( s4 ) , boost::begin( s5 ) , boost::begin( s6 ) , boost::begin( s7 ) , boost::begin( s8 ) , boost::begin( s9 ) , boost::begin( s10 ) , op );
}
template< class S1 , class S2 , class S3 , class S4 , class S5 , class S6 ,class S7 , class S8 , class S9 , class S10 , class S11 , class Op >
static void for_each11( S1 &s1 , S2 &s2 , S3 &s3 , S4 &s4 , S5 &s5 , S6 &s6 , S7 &s7 , S8 &s8 , S9 &s9 , S10 &s10 , S11 &s11 , Op op )
{
detail::for_each11( boost::begin( s1 ) , boost::end( s1 ) , boost::begin( s2 ) , boost::begin( s3 ) , boost::begin( s4 ) , boost::begin( s5 ) , boost::begin( s6 ) , boost::begin( s7 ) , boost::begin( s8 ) , boost::begin( s9 ) , boost::begin( s10 ) , boost::begin( s11 ) , op );
}
template< class S1 , class S2 , class S3 , class S4 , class S5 , class S6 ,class S7 , class S8 , class S9 , class S10 , class S11 , class S12 , class Op >
static void for_each12( S1 &s1 , S2 &s2 , S3 &s3 , S4 &s4 , S5 &s5 , S6 &s6 , S7 &s7 , S8 &s8 , S9 &s9 , S10 &s10 , S11 &s11 , S12 &s12 , Op op )
{
detail::for_each12( boost::begin( s1 ) , boost::end( s1 ) , boost::begin( s2 ) , boost::begin( s3 ) , boost::begin( s4 ) , boost::begin( s5 ) , boost::begin( s6 ) , boost::begin( s7 ) , boost::begin( s8 ) , boost::begin( s9 ) , boost::begin( s10 ) , boost::begin( s11 ) , boost::begin( s12 ) , op );
}
template< class S1 , class S2 , class S3 , class S4 , class S5 , class S6 ,class S7 , class S8 , class S9 , class S10 , class S11 , class S12 , class S13 , class Op >
static void for_each13( S1 &s1 , S2 &s2 , S3 &s3 , S4 &s4 , S5 &s5 , S6 &s6 , S7 &s7 , S8 &s8 , S9 &s9 , S10 &s10 , S11 &s11 , S12 &s12 , S13 &s13 , Op op )
{
detail::for_each13( boost::begin( s1 ) , boost::end( s1 ) , boost::begin( s2 ) , boost::begin( s3 ) , boost::begin( s4 ) , boost::begin( s5 ) , boost::begin( s6 ) , boost::begin( s7 ) , boost::begin( s8 ) , boost::begin( s9 ) , boost::begin( s10 ) , boost::begin( s11 ) , boost::begin( s12 ) , boost::begin( s13 ) , op );
}
template< class S1 , class S2 , class S3 , class S4 , class S5 , class S6 ,class S7 , class S8 , class S9 , class S10 , class S11 , class S12 , class S13 , class S14 , class Op >
static void for_each14( S1 &s1 , S2 &s2 , S3 &s3 , S4 &s4 , S5 &s5 , S6 &s6 , S7 &s7 , S8 &s8 , S9 &s9 , S10 &s10 , S11 &s11 , S12 &s12 , S13 &s13 , S14 &s14 , Op op )
{
detail::for_each14( boost::begin( s1 ) , boost::end( s1 ) , boost::begin( s2 ) , boost::begin( s3 ) , boost::begin( s4 ) , boost::begin( s5 ) , boost::begin( s6 ) , boost::begin( s7 ) , boost::begin( s8 ) , boost::begin( s9 ) , boost::begin( s10 ) , boost::begin( s11 ) , boost::begin( s12 ) , boost::begin( s13 ) , boost::begin( s14 ) , op );
}
// Apply op element-wise across 15 ranges; iteration length comes from s1 alone.
template< class S1 , class S2 , class S3 , class S4 , class S5 , class S6 ,class S7 , class S8 , class S9 , class S10 , class S11 , class S12 , class S13 , class S14 , class S15 , class Op >
static void for_each15( S1 &s1 , S2 &s2 , S3 &s3 , S4 &s4 , S5 &s5 , S6 &s6 , S7 &s7 , S8 &s8 , S9 &s9 , S10 &s10 , S11 &s11 , S12 &s12 , S13 &s13 , S14 &s14 , S15 &s15 , Op op )
{
    detail::for_each15( boost::begin( s1 ) , boost::end( s1 ) , boost::begin( s2 ) , boost::begin( s3 ) , boost::begin( s4 ) , boost::begin( s5 ) , boost::begin( s6 ) , boost::begin( s7 ) , boost::begin( s8 ) , boost::begin( s9 ) , boost::begin( s10 ) , boost::begin( s11 ) , boost::begin( s12 ) , boost::begin( s13 ) , boost::begin( s14 ) , boost::begin( s15 ) , op );
}
// Infinity norm of a range: the maximum absolute element value, computed by
// detail::norm_inf starting from a zero accumulator of the result type.
template< typename S >
static typename norm_result_type<S>::type norm_inf( const S &s )
{
    return detail::norm_inf( boost::begin( s ) , boost::end( s ) ,
                             static_cast< typename norm_result_type<S>::type >( 0 ) );
}
};
} // odeint
} // numeric
} // boost
#endif // BOOST_NUMERIC_ODEINT_ALGEBRA_RANGE_ALGEBRA_HPP_INCLUDED
| gt-ros-pkg/humans | src/videoray/catkin_ws/src/videoray/include/boost/numeric/odeint/algebra/range_algebra.hpp | C++ | mit | 8,358 |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.eventgrid.models;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
 * Schema of the Data property of an EventGridEvent for a
 * Microsoft.Resources.ResourceActionSuccess event. This event is raised when a
 * resource action operation completes successfully.
 */
public class ResourceActionSuccessData {
    /** The tenant ID of the resource. */
    @JsonProperty(value = "tenantId")
    private String tenantId;

    /** The subscription ID of the resource. */
    @JsonProperty(value = "subscriptionId")
    private String subscriptionId;

    /** The resource group of the resource. */
    @JsonProperty(value = "resourceGroup")
    private String resourceGroup;

    /** The resource provider performing the operation. */
    @JsonProperty(value = "resourceProvider")
    private String resourceProvider;

    /** The URI of the resource in the operation. */
    @JsonProperty(value = "resourceUri")
    private String resourceUri;

    /** The operation that was performed. */
    @JsonProperty(value = "operationName")
    private String operationName;

    /** The status of the operation. */
    @JsonProperty(value = "status")
    private String status;

    /** The requested authorization for the operation. */
    @JsonProperty(value = "authorization")
    private String authorization;

    /** The properties of the claims. */
    @JsonProperty(value = "claims")
    private String claims;

    /** An operation ID used for troubleshooting. */
    @JsonProperty(value = "correlationId")
    private String correlationId;

    /** The details of the operation. */
    @JsonProperty(value = "httpRequest")
    private String httpRequest;

    /** @return the tenant ID of the resource */
    public String tenantId() { return this.tenantId; }

    /**
     * Set the tenant ID of the resource.
     *
     * @param tenantId the tenantId value to set
     * @return the ResourceActionSuccessData object itself.
     */
    public ResourceActionSuccessData withTenantId(String tenantId) { this.tenantId = tenantId; return this; }

    /** @return the subscription ID of the resource */
    public String subscriptionId() { return this.subscriptionId; }

    /**
     * Set the subscription ID of the resource.
     *
     * @param subscriptionId the subscriptionId value to set
     * @return the ResourceActionSuccessData object itself.
     */
    public ResourceActionSuccessData withSubscriptionId(String subscriptionId) { this.subscriptionId = subscriptionId; return this; }

    /** @return the resource group of the resource */
    public String resourceGroup() { return this.resourceGroup; }

    /**
     * Set the resource group of the resource.
     *
     * @param resourceGroup the resourceGroup value to set
     * @return the ResourceActionSuccessData object itself.
     */
    public ResourceActionSuccessData withResourceGroup(String resourceGroup) { this.resourceGroup = resourceGroup; return this; }

    /** @return the resource provider performing the operation */
    public String resourceProvider() { return this.resourceProvider; }

    /**
     * Set the resource provider performing the operation.
     *
     * @param resourceProvider the resourceProvider value to set
     * @return the ResourceActionSuccessData object itself.
     */
    public ResourceActionSuccessData withResourceProvider(String resourceProvider) { this.resourceProvider = resourceProvider; return this; }

    /** @return the URI of the resource in the operation */
    public String resourceUri() { return this.resourceUri; }

    /**
     * Set the URI of the resource in the operation.
     *
     * @param resourceUri the resourceUri value to set
     * @return the ResourceActionSuccessData object itself.
     */
    public ResourceActionSuccessData withResourceUri(String resourceUri) { this.resourceUri = resourceUri; return this; }

    /** @return the operation that was performed */
    public String operationName() { return this.operationName; }

    /**
     * Set the operation that was performed.
     *
     * @param operationName the operationName value to set
     * @return the ResourceActionSuccessData object itself.
     */
    public ResourceActionSuccessData withOperationName(String operationName) { this.operationName = operationName; return this; }

    /** @return the status of the operation */
    public String status() { return this.status; }

    /**
     * Set the status of the operation.
     *
     * @param status the status value to set
     * @return the ResourceActionSuccessData object itself.
     */
    public ResourceActionSuccessData withStatus(String status) { this.status = status; return this; }

    /** @return the requested authorization for the operation */
    public String authorization() { return this.authorization; }

    /**
     * Set the requested authorization for the operation.
     *
     * @param authorization the authorization value to set
     * @return the ResourceActionSuccessData object itself.
     */
    public ResourceActionSuccessData withAuthorization(String authorization) { this.authorization = authorization; return this; }

    /** @return the properties of the claims */
    public String claims() { return this.claims; }

    /**
     * Set the properties of the claims.
     *
     * @param claims the claims value to set
     * @return the ResourceActionSuccessData object itself.
     */
    public ResourceActionSuccessData withClaims(String claims) { this.claims = claims; return this; }

    /** @return an operation ID used for troubleshooting */
    public String correlationId() { return this.correlationId; }

    /**
     * Set an operation ID used for troubleshooting.
     *
     * @param correlationId the correlationId value to set
     * @return the ResourceActionSuccessData object itself.
     */
    public ResourceActionSuccessData withCorrelationId(String correlationId) { this.correlationId = correlationId; return this; }

    /** @return the details of the operation */
    public String httpRequest() { return this.httpRequest; }

    /**
     * Set the details of the operation.
     *
     * @param httpRequest the httpRequest value to set
     * @return the ResourceActionSuccessData object itself.
     */
    public ResourceActionSuccessData withHttpRequest(String httpRequest) { this.httpRequest = httpRequest; return this; }
}
| Azure/azure-sdk-for-java | sdk/eventgrid/microsoft-azure-eventgrid/src/main/java/com/microsoft/azure/eventgrid/models/ResourceActionSuccessData.java | Java | mit | 7,681 |
using Kliva.Models;
namespace Kliva.Services.Interfaces
{
    /// <summary>
    /// Provides read-only information about the running application.
    /// </summary>
    public interface IApplicationInfoService
    {
        /// <summary>Gets the application's version information.</summary>
        AppVersion AppVersion { get; }
    }
} | timheuer/Kliva-1 | src/Kliva/Services/Interfaces/IApplicationInfoService.cs | C# | mit | 159 |
<!-- 404 error view: full-height background with a centered, animated panel. -->
<div class="row">
    <div class="mainvcontainer" style="background-image:url('<?php echo base_url('img/bg.jpg'); ?>'); min-height:900px;">
        <!-- Empty side columns center the 6-column panel. -->
        <div class="col-sm-3"></div>
        <div class="animated bounceInDown col-sm-6">
            <section class="panel" style="margin-top:90px;padding:50px;
            box-shadow: 4px 12px 85px rgba(0,0,0,.9);
            border: 1px solid #ccc;
            border-radius: 10px;
            ">
                <div class="panel-body">
                    <!-- Site logo above the error message. -->
                    <img src="<?php echo base_url("img/ig.png"); ?>" class=" img-responsive my-center" style="position:relative;">
                    <h1 class="text-center"> 404 error</h1>
                    Page not found!
                </div>
            </section>
        </div>
        <div class="col-sm-3"></div>
    </div>
</div>
| freeztime/ignitedcms | application/views/admin/404/404.php | PHP | mit | 894 |
module.exports = {
entry: './client/index.js',
output: {
path: __dirname + '/public',
filename: 'bundle.js'
},
module: {
loaders: [{
test: /\.jsx?$/,
loader: 'babel-loader',
exclude: /node_modules/
}]
},
resolve: {
extensions: ['.js', '.jsx']
},
devtool: 'source-map'
}
| kelly-keating/kelly-keating.github.io | webpack.config.js | JavaScript | mit | 324 |
# FactoryBot factories for the three Category node models. Each one layers the
# shared :cms_node trait and only sets the model-specific `route`.
FactoryBot.define do
  factory :category_node_base, class: Category::Node::Base, traits: [:cms_node] do
    route { "category/base" }
  end
  factory :category_node_node, class: Category::Node::Node, traits: [:cms_node] do
    route { "category/node" }
  end
  factory :category_node_page, class: Category::Node::Page, traits: [:cms_node] do
    route { "category/page" }
  end
end
| ShinjiTanimoto/shirasagi | spec/factories/category/nodes.rb | Ruby | mit | 384 |
import { ServiceMessage } from './proxy';
/** A single uploaded file: its name, MIME type, and (string-encoded) contents. */
export interface FileInfo {
    name: string;
    type: string;
    data: string;
}
/** State of a file <input>: its name, selected files, and current value. */
export interface FileInputInfo {
    name: string;
    files: FileInfo[];
    value: string;
}
/** Service message requesting previously uploaded files by their stored paths. */
export interface GetUploadedFilesServiceMessage extends ServiceMessage {
    filePaths: string[];
}
/** Service message storing uploaded file contents under the given file names. */
export interface StoreUploadedFilesServiceMessage extends ServiceMessage {
    data: string[];
    fileNames: string[];
}
| miherlosev/testcafe-hammerhead | src/typings/upload.d.ts | TypeScript | mit | 447 |
package foo.bar;
import static org.junit.Assert.assertTrue;
import org.junit.Test;
// Defects4J fixture: each test originally asserted false; the tool commented
// out the flaky bodies (preserved verbatim below) leaving empty passing tests.
public class FailingTests {
    @Test
    public void test0() {}
// Defects4J: flaky method
//   @Test
//   public void test0() {
//     assertTrue(false);
//   }
    @Test
    public void test1() {}
// Defects4J: flaky method
//   @Test
//   public void test1() {
//     assertTrue(false);
//   }
    @Test
    public void test2() {}
// Defects4J: flaky method
//   @Test
//   public void test2() {
//     assertTrue(false);
//   }
}
| jose/defects4j | framework/test/resources/output/foo/bar/FailingTests.java | Java | mit | 512 |
// AMD test module: depends on 'test3' (object with .name) and 'test4' (string)
// and resolves to a status string embedding both dependency values.
define( 'test2', ['test3', 'test4'], function( obj, str ){
    return 'test2 is done, deps : [ ' + obj.name + str + ' ]';
}); | eleanors/EaseJS | core/test/assets/test2.js | JavaScript | mit | 122
from PyQt4 import QtCore, QtGui
import acq4.Manager
import acq4.pyqtgraph as pg
import acq4.pyqtgraph.opengl as gl
import numpy as np
import acq4.util.functions as fn
import re
man = acq4.Manager.getManager()
## update DB field to reflect dir meta info
#for i in db.select('Cell', ['rowid']):
    #d = db.getDir('Cell', i[0])
    #typ = d.info().get('type', '')
    #db.update('Cell', {'type': typ}, rowid=i[0])
    #print d, typ
# NOTE(review): module-level `global` statements are no-ops; kept for clarity
# of intent when this script is exec'd in a shared namespace.
global eventView, siteView, cells
eventView = 'events_view'
siteView = 'sites_view'
# `events` caches loaded per-cell event data across re-runs of this script in
# the same interpreter session; firstRun gates one-time signal hookup in init().
firstRun = False
if 'events' not in locals():
    global events
    events = {}
    firstRun = True
# Main window: a LayoutWidget holding the control row and the plot splitters.
win = QtGui.QMainWindow()
#cw = QtGui.QWidget()
layout = pg.LayoutWidget()
#layout = QtGui.QGridLayout()
#layout.setContentsMargins(0,0,0,0)
#layout.setSpacing(0)
#cw.setLayout(layout)
win.setCentralWidget(layout)
# Control row: cell selector, reload button, display-mode checkboxes, and
# spin boxes for fit-quality limits and the post-stimulus time window.
cellCombo = QtGui.QComboBox()
cellCombo.setSizeAdjustPolicy(cellCombo.AdjustToContents)
layout.addWidget(cellCombo)
reloadBtn = QtGui.QPushButton('reload')
layout.addWidget(reloadBtn)
separateCheck = QtGui.QCheckBox("color pre/post")
layout.addWidget(separateCheck)
colorCheck = QtGui.QCheckBox("color y position")
layout.addWidget(colorCheck)
errLimitSpin = pg.SpinBox(value=0.7, step=0.1)
layout.addWidget(errLimitSpin)
lengthRatioLimitSpin = pg.SpinBox(value=1.5, step=0.1)
layout.addWidget(lengthRatioLimitSpin)
postRgnStartSpin = pg.SpinBox(value=0.500, step=0.01, siPrefix=True, suffix='s')
layout.addWidget(postRgnStartSpin)
postRgnStopSpin = pg.SpinBox(value=0.700, step=0.01, siPrefix=True, suffix='s')
layout.addWidget(postRgnStopSpin)
# Vertical splitter: scatter plot (amplitude vs decay tau) on top, trace view
# plus morphology/atlas tabs below.
spl1 = QtGui.QSplitter()
spl1.setOrientation(QtCore.Qt.Vertical)
layout.addWidget(spl1, row=1, col=0, rowspan=1, colspan=8)
pw1 = pg.PlotWidget()
spl1.addWidget(pw1)
pw1.setLabel('left', 'Amplitude', 'A')
pw1.setLabel('bottom', 'Decay Tau', 's')
spl2 = QtGui.QSplitter()
spl2.setOrientation(QtCore.Qt.Horizontal)
spl1.addWidget(spl2)
pw2 = pg.PlotWidget(labels={'bottom': ('time', 's')})
spl2.addWidget(pw2)
tab = QtGui.QTabWidget()
spl2.addWidget(tab)
## For viewing cell morphology
gv = pg.GraphicsView()
gv.setBackgroundBrush(pg.mkBrush('w'))
image = pg.ImageItem()
gv.addItem(image)
gv.enableMouse()
gv.setAspectLocked(True)
tab.addTab(gv, 'Morphology')
## 3D atlas
import acq4.analysis.atlas.CochlearNucleus as CN
atlas = CN.CNAtlasDisplayWidget()
atlas.showLabel('DCN')
atlas.showLabel('AVCN')
atlas.showLabel('PVCN')
tab.addTab(atlas, 'Atlas')
atlasPoints = gl.GLScatterPlotItem()
atlas.addItem(atlasPoints)
win.show()
win.resize(1000,800)
# Scatter layers: sp1 = excitatory, sp2 = inhibitory, sp3 = post-stim subset
# (pre/post mode), sp4 = depth-colored points (color-y-position mode).
sp1 = pw1.scatterPlot([], pen=pg.mkPen(None), brush=(200,200,255,70), identical=True, size=8)
sp2 = pw1.scatterPlot([], pen=pg.mkPen(None), brush=(255,200,200,70), identical=True, size=8)
sp3 = pw1.scatterPlot([], pen=pg.mkPen(None), brush=(100,255,100,70), identical=True, size=8)
sp4 = pw1.scatterPlot([], pen=pg.mkPen(None), size=8)
print "Reading cell list..."
#import os, pickle
#md = os.path.abspath(os.path.split(__file__)[0])
#cacheFile = os.path.join(md, 'eventCache.p')
#if os.path.isfile(cacheFile):
    #print "Read from cache..."
    #ev = pickle.load(open(cacheFile, 'r'))
#else:
    #pickle.dump(ev, open(cacheFile, 'w'))
## create views that link cell information to events/sites
db = man.getModule('Data Manager').currentDatabase()
if not db.hasTable(siteView):
    print "Creating DB views."
    db.createView(siteView, ['photostim_sites', 'DirTable_Protocol', 'DirTable_Cell']) ## seems to be unused.
if not db.hasTable(eventView):
    db.createView(eventView, ['photostim_events', 'DirTable_Protocol', 'DirTable_Cell'])
# Populate the cell selector with all cells that have photostim sites, sorted
# by directory name; the first (blank) entry means "no cell selected".
cells = db.select(siteView, ['CellDir'], distinct=True)
cells = [c['CellDir'] for c in cells]
cells.sort(lambda a,b: cmp(a.name(), b.name()))
cellCombo.addItem('')
for c in cells:
    cellCombo.addItem(c.name(relativeTo=man.baseDir))
#cellSpin.setMaximum(len(cells)-1)
print "Done."
def loadCell(cell, reloadData=False):
global events
if reloadData:
events.pop(cell, None)
if cell in events:
return
db = man.getModule('Data Manager').currentDatabase()
mod = man.dataModel
allEvents = []
hvals = {}
nEv = 0
positionCache = {}
tcache = {}
print "Loading all events for cell", cell
tot = db.select(eventView, 'count()', where={'CellDir': cell})[0]['count()']
print tot, "total events.."
with pg.ProgressDialog('Loading event data...', maximum=tot, wait=0) as dlg:
for ev in db.iterSelect(eventView, ['ProtocolSequenceDir', 'SourceFile', 'fitAmplitude', 'fitTime', 'fitDecayTau', 'fitRiseTau', 'fitTimeToPeak', 'fitLengthOverDecay', 'fitFractionalError', 'userTransform', 'CellType', 'CellDir', 'ProtocolDir'], where={'CellDir': cell}, toArray=True, chunkSize=200):
extra = np.empty(ev.shape, dtype=[('right', float), ('anterior', float), ('dorsal', float), ('holding', float)])
## insert holding levels
for i in range(len(ev)):
sd = ev[i]['ProtocolSequenceDir']
if sd not in hvals:
cf = ev[i]['SourceFile']
hvals[sd] = mod.getClampHoldingLevel(cf)
#print hvals[sd], cf
extra[i]['holding'] = hvals[sd]
## insert positions
for i in range(len(ev)):
protoDir = ev[i]['SourceFile'].parent()
key = protoDir
#key = (ev[i]['ProtocolSequenceDir'], ev[i]['SourceFile'])
if key not in positionCache:
#try:
#dh = ev[i]['ProtocolDir']
#p1 = pg.Point(dh.info()['Scanner']['position'])
#if key[0] not in tcache:
#tr = pg.SRTTransform()
#tr.restoreState(dh.parent().info()['userTransform'])
#tcache[key[0]] = tr
#trans = tcache[key[0]]
#p2 = trans.map(p1)
#pcache[key] = (p2.x(),p2.y())
#except:
#print key
#raise
rec = db.select('CochlearNucleus_Protocol', where={'ProtocolDir': protoDir})
if len(rec) == 0:
pos = (None, None, None)
elif len(rec) == 1:
pos = (rec[0]['right'], rec[0]['anterior'], rec[0]['dorsal'])
elif len(rec) == 2:
raise Exception("Multiple position records for %s!" % str(protoDir))
positionCache[key] = pos
extra[i]['right'] = positionCache[key][0]
extra[i]['anterior'] = positionCache[key][1]
extra[i]['dorsal'] = positionCache[key][2]
ev = fn.concatenateColumns([ev, extra])
allEvents.append(ev)
nEv += len(ev)
dlg.setValue(nEv)
if dlg.wasCanceled():
raise Exception('Canceled by user.')
ev = np.concatenate(allEvents)
numExSites = 0
numInSites = 0
for site in db.select(siteView, 'ProtocolSequenceDir', where={'CellDir': cell}):
h = hvals.get(site['ProtocolSequenceDir'],None)
if h is None:
continue
if h > -0.02:
numInSites += 1
elif h < -0.04:
numExSites += 1
events[cell] = (ev, numExSites, numInSites)
def init():
    """Connect GUI signals to handlers, exactly once per interpreter session.

    Guarded by `firstRun` so re-executing the script does not create duplicate
    signal/slot connections (which would fire handlers multiple times).
    """
    if not firstRun:
        return
    cellCombo.currentIndexChanged.connect(showCell)
    separateCheck.toggled.connect(showCell)
    colorCheck.toggled.connect(showCell)
    errLimitSpin.valueChanged.connect(showCell)
    lengthRatioLimitSpin.valueChanged.connect(showCell)
    reloadBtn.clicked.connect(reloadCell)
    # Clicking a point in any scatter layer shows its raw trace and fit.
    for s in [sp1, sp2, sp3, sp4]:
        s.sigPointsClicked.connect(plotClicked)
def plotClicked(plt, pts):
    """Show the raw trace and PSP fit for a clicked scatter-plot event point.

    plt -- scatter plot item that emitted sigPointsClicked
    pts -- clicked points; only the first is used
    """
    pt = pts[0]
    #(id, fn, time) = pt.data
    #[['SourceFile', 'ProtocolSequenceDir', 'fitTime']]
    #fh = db.getDir('ProtocolSequence', id)[fn]
    fh = pt.data()['SourceFile']
    id = pt.data()['ProtocolSequenceDir']
    time = pt.data()['fitTime']
    # Load the primary channel and low-pass filter it for display.
    data = fh.read()['Channel':'primary']
    data = fn.besselFilter(data, 8e3)
    p = pw2.plot(data, clear=True)
    pos = time / data.xvals('Time')[-1]
    arrow = pg.CurveArrow(p, pos=pos)
    # If the event is outside the current view, recenter with the event at 1/5
    # of the window width from the left.
    xr = pw2.viewRect().left(), pw2.viewRect().right()
    if time < xr[0] or time > xr[1]:
        w = xr[1]-xr[0]
        pw2.setXRange(time-w/5., time+4*w/5., padding=0)
    # Overlay the fitted PSP curve, offset by the baseline just before onset.
    fitLen = pt.data()['fitDecayTau']*pt.data()['fitLengthOverDecay']
    x = np.linspace(time, time+fitLen, fitLen * 50e3)
    v = [pt.data()['fitAmplitude'], pt.data()['fitTime'], pt.data()['fitRiseTau'], pt.data()['fitDecayTau']]
    y = fn.pspFunc(v, x, risePower=2.0) + data[np.argwhere(data.xvals('Time')>time)[0]-1]
    pw2.plot(x, y, pen='b')
    #plot.addItem(arrow)
def select(ev, ex=True):
    """Filter an event record array down to well-fit events of one polarity.

    ev -- structured array with 'holding', 'fitAmplitude', 'fitDecayTau',
          'fitFractionalError' and 'fitLengthOverDecay' fields.
    ex -- True selects excitatory events (holding below -40mV, negative
          amplitude); False selects inhibitory events (holding near 0mV,
          positive amplitude).
    """
    if ex:
        # Excitatory: hyperpolarized holding, downward events up to 200pA.
        holdingMask = ev['holding'] < -0.04
        ev = ev[holdingMask]
        ampMask = (ev['fitAmplitude'] < 0) * (ev['fitAmplitude'] > -2e-10)
    else:
        # Inhibitory: holding between -20mV and +10mV, upward events.
        holdingMask = (ev['holding'] >= -0.02) * (ev['holding'] <= 0.01)
        ev = ev[holdingMask]
        ampMask = (ev['fitAmplitude'] > 0) * (ev['fitAmplitude'] < 2e-10)
    ev = ev[ampMask]
    # Fit-quality cuts shared by both polarities: plausible decay time range,
    # bounded fractional error, and sufficient fit length relative to decay.
    decayMask = (0 < ev['fitDecayTau']) * (ev['fitDecayTau'] < 0.2)
    ev = ev[decayMask]
    ev = ev[ev['fitFractionalError'] < errLimitSpin.value()]
    ev = ev[ev['fitLengthOverDecay'] > lengthRatioLimitSpin.value()]
    return ev
def reloadCell():
    """Re-display the current cell after discarding its cached event data."""
    showCell(reloadData=True)
def showCell(**kwds):
    """Display the currently selected cell.

    Shows the morphology image, plots fitted event parameters (amplitude vs
    decay tau) split into excitatory/inhibitory layers -- optionally colored by
    dorsal position or separated into pre/post-stimulus subsets -- and builds a
    summary title with median fit statistics and spontaneous rates.

    Keyword args:
    reloadData -- if True, re-query the database instead of using the cache.
    """
    pw2.clear()
    reloadData = kwds.get('reloadData', False)
    #global lock
    #if lock:
        #return
    #lock = True
    QtGui.QApplication.processEvents() ## prevents double-spin
    #lock = False
    # Combo index 0 is the blank entry, so cell list index is offset by one.
    cell = cells[cellCombo.currentIndex()-1]
    dh = cell #db.getDir('Cell', cell)
    loadCell(dh, reloadData=reloadData)
    try:
        image.setImage(dh['morphology.png'].read())
        gv.setRange(image.sceneBoundingRect())
    except:
        # NOTE(review): bare except hides real errors; any failure (missing
        # file, read error) silently falls back to a blank image.
        image.setImage(np.zeros((2,2)))
        pass
    ev, numExSites, numInSites = events[cell]
    ev2 = select(ev, ex=True)
    ev3 = select(ev, ex=False)
    if colorCheck.isChecked():
        # Depth-color mode: single layer (sp4) of post-stimulus events, hue
        # mapped from the event's dorsal position.
        sp1.hide()
        sp2.hide()
        sp3.hide()
        sp4.show()
        start = postRgnStart()
        stop = postRgnStop()
        ev2post = ev2[(ev2['fitTime']>start) * (ev2['fitTime']<stop)]
        ev3post = ev3[(ev3['fitTime']>start) * (ev3['fitTime']<stop)]
        ev4 = np.concatenate([ev2post, ev3post])
        yMax = ev4['dorsal'].max()
        yMin = ev4['dorsal'].min()
        brushes = []
        for i in range(len(ev4)):
            hue = 0.6*((ev4[i]['dorsal']-yMin) / (yMax-yMin))
            brushes.append(pg.hsvColor(hue, 1.0, 1.0, 0.3))
            #pts.append({
                #'pos': (ev4[i]['fitDecayTau'], ev4[i]['fitAmplitude']),
                #'brush': pg.hsvColor(hue, 1, 1, 0.3),
                #'data': ev4[i]
            #})
        sp4.setData(x=ev4['fitDecayTau'], y=ev4['fitAmplitude'], symbolBrush=brushes, data=ev4)
    else:
        # Polarity mode: sp1/sp2 show excitatory/inhibitory events; sp3 shows
        # the post-stimulus subset when pre/post separation is enabled.
        sp1.show()
        sp2.show()
        #sp3.show()
        sp4.hide()
        ## excitatory
        if separateCheck.isChecked():
            pre = ev2[ev2['fitTime']< preRgnStop()]
            post = ev2[(ev2['fitTime'] > postRgnStart()) * (ev2['fitTime'] < postRgnStop())]
        else:
            pre = ev2
        sp1.setData(x=pre['fitDecayTau'], y=pre['fitAmplitude'], data=pre);
        #print "Cell ", cell
        #print "   excitatory:", np.median(ev2['fitDecayTau']), np.median(ev2['fitAmplitude'])
        ## inhibitory
        if separateCheck.isChecked():
            pre = ev3[ev3['fitTime']< preRgnStop()]
            post2 = ev3[(ev3['fitTime'] > postRgnStart()) * (ev3['fitTime'] < postRgnStop())]
            post = np.concatenate([post, post2])
        else:
            pre = ev3
        sp2.setData(x=pre['fitDecayTau'], y=pre['fitAmplitude'], data=pre);
        #print "   inhibitory:", np.median(ev2['fitDecayTau']), np.median(ev2['fitAmplitude'])
        if separateCheck.isChecked():
            sp3.setData(x=post['fitDecayTau'], y=post['fitAmplitude'], data=post)
            sp3.show()
        else:
            sp3.hide()
    # Cell type comes from whichever polarity has at least one event.
    try:
        typ = ev2[0]['CellType']
    except:
        typ = ev3[0]['CellType']
    sr = spontRate(ev2, numExSites)
    sri = spontRate(ev3, numInSites)
    title = "%s -- %s --- <span style='color: #99F;'>ex:</span> %s %s %s %0.1fHz --- <span style='color: #F99;'>in:</span> %s %s %s %0.1fHz" % (
        dh.name(relativeTo=dh.parent().parent().parent()),
        typ,
        pg.siFormat(np.median(ev2['fitTimeToPeak']), error=np.std(ev2['fitTimeToPeak']), space=False, suffix='s'),
        pg.siFormat(np.median(ev2['fitDecayTau']), error=np.std(ev2['fitDecayTau']), space=False, suffix='s'),
        pg.siFormat(np.median(ev2['fitAmplitude']), error=np.std(ev2['fitAmplitude']), space=False, suffix='A'),
        sr,
        pg.siFormat(np.median(ev3['fitTimeToPeak']), error=np.std(ev3['fitTimeToPeak']), space=False, suffix='s'),
        pg.siFormat(np.median(ev3['fitDecayTau']), error=np.std(ev3['fitDecayTau']), space=False, suffix='s'),
        pg.siFormat(np.median(ev3['fitAmplitude']), error=np.std(ev3['fitAmplitude']), space=False, suffix='A'),
        sri)
    # Console copy of the title with the HTML markup stripped.
    print re.sub(r'<[^>]+>', '', title)
    pw1.setTitle(title)
    ### show cell in atlas
    #rec = db.select('CochlearNucleus_Cell', where={'CellDir': cell})
    #pts = []
    #if len(rec) > 0:
        #pos = (rec[0]['right'], rec[0]['anterior'], rec[0]['dorsal'])
        #pts = [{'pos': pos, 'size': 100e-6, 'color': (0.7, 0.7, 1.0, 1.0)}]
    ### show event positions
    # Deduplicate stimulus positions via dict keys before plotting in 3D.
    evSpots = {}
    for rec in ev:
        p = (rec['right'], rec['anterior'], rec['dorsal'])
        evSpots[p] = None
    pos = np.array(evSpots.keys())
    atlasPoints.setData(pos=pos, )
def spontRate(ev, n):
    """Estimate the spontaneous event rate (Hz): events occurring before the
    stimulus window, divided by total pre-stimulus observation time across the
    n recording sites. Returns 0 when there are no sites.
    """
    ## This is broken. It does not take into account recordings that had no events.
    ev = ev[ev['fitTime'] < preRgnStop()]
    #count = {}
    #dirs = set()
    #for i in range(len(ev)):
        #key = (ev[i]['ProtocolSequenceDir'], ev[i]['SourceFile'])
        #dirs.add(set)
        #if key not in count:
            #count[key] = 0
        #count[key] += 1
    #sr = np.mean([v/(preRgnStop()) for v in count.itervalues()])
    if n == 0:
        return 0
    return len(ev) / (preRgnStop() * n)
def preRgnStop():
    # End of the pre-stimulus (spontaneous activity) window: 2 ms before the
    # post-stimulus region begins.
    return postRgnStartSpin.value() - 0.002
def postRgnStart():
    # Start of the post-stimulus (evoked) window: 2 ms after the spin-box
    # value, leaving a gap around the stimulus time itself.
    return postRgnStartSpin.value() + 0.002
def postRgnStop():
    # End of the post-stimulus analysis window, taken directly from the GUI.
    return postRgnStopSpin.value()
init() | hiuwo/acq4 | acq4/analysis/scripts/eventExplorer.py | Python | mit | 15,462 |
import React, { PureComponent } from 'react';
class ProgressBar extends PureComponent {
render() {
const { progress, className, percent = 100 } = this.props;
return (
<div className={className}>
{progress && <div className="progress">
<div
className="progress-bar progress-bar-striped progress-bar-animated"
role="progressbar"
style={{ width: `${percent}%` }}
/>
</div>}
</div>
);
}
}
export default ProgressBar;
| Apozhidaev/terminal.mobi | src/components/ProgressBar/index.js | JavaScript | mit | 517 |
var DEFAULT_HANDEDNESS = require('../constants').DEFAULT_HANDEDNESS;
// Labels used to name axis components in emitted `<control>moved` details.
var AXIS_LABELS = ['x', 'y', 'z', 'w'];
var NUM_HANDS = 2; // Number of hands in a pair. Should always be 2.
/**
 * Called on controller component `.play` handlers.
 * Check if controller matches parameters and inject tracked-controls component.
 * Handle event listeners.
 * Generate controllerconnected or controllerdisconnected events.
 *
 * @param {object} component - Tracked controls component.
 * @param {object} idPrefix - Prefix to match in gamepad id if any.
 * @param {object} queryObject - Map of values to match.
 */
module.exports.checkControllerPresentAndSetup = function (component, idPrefix, queryObject) {
  var el = component.el;
  var isPresent = isControllerPresent(component, idPrefix, queryObject);

  // If component was previously paused and now playing, re-add event listeners.
  // Handle the event listeners here since this helper method is control of calling
  // `.addEventListeners` and `.removeEventListeners`.
  if (component.controllerPresent && !component.controllerEventsActive) {
    component.addEventListeners();
  }

  // Nothing changed, no need to do anything.
  if (isPresent === component.controllerPresent) { return isPresent; }

  component.controllerPresent = isPresent;

  // Update controller presence. Emitted events carry the component name and a
  // reference to the component itself in their detail.
  if (isPresent) {
    component.injectTrackedControls();
    component.addEventListeners();
    el.emit('controllerconnected', {name: component.name, component: component});
  } else {
    component.removeEventListeners();
    el.emit('controllerdisconnected', {name: component.name, component: component});
  }
};
/**
* Enumerate controller (that have pose) and check if they match parameters.
*
* @param {object} component - Tracked controls component.
* @param {object} idPrefix - Prefix to match in gamepad id if any.
* @param {object} queryObject - Map of values to match.
*/
function isControllerPresent (component, idPrefix, queryObject) {
var gamepads;
var sceneEl = component.el.sceneEl;
var trackedControlsSystem;
var filterControllerIndex = queryObject.index || 0;
if (!idPrefix) { return false; }
trackedControlsSystem = sceneEl && sceneEl.systems['tracked-controls'];
if (!trackedControlsSystem) { return false; }
gamepads = trackedControlsSystem.controllers;
if (!gamepads.length) { return false; }
return !!findMatchingController(gamepads, null, idPrefix, queryObject.hand,
filterControllerIndex);
}
module.exports.isControllerPresent = isControllerPresent;
/**
* Walk through the given controllers to find any where the device ID equals
* filterIdExact, or startsWith filterIdPrefix.
* A controller where this considered true is considered a 'match'.
*
* For each matching controller:
* If filterHand is set, and the controller:
* is handed, we further verify that controller.hand equals filterHand.
* is unhanded (controller.hand is ''), we skip until we have found a
* number of matching controllers that equals filterControllerIndex
* If filterHand is not set, we skip until we have found the nth matching
* controller, where n equals filterControllerIndex
*
* The method should be called with one of: [filterIdExact, filterIdPrefix] AND
* one or both of: [filterHand, filterControllerIndex]
*
* @param {object} controllers - Array of gamepads to search
* @param {string} filterIdExact - If set, used to find controllers with id === this value
* @param {string} filterIdPrefix - If set, used to find controllers with id startsWith this value
* @param {object} filterHand - If set, further filters controllers with matching 'hand' property
* @param {object} filterControllerIndex - Find the nth matching controller,
* where n equals filterControllerIndex. defaults to 0.
*/
function findMatchingController (controllers, filterIdExact, filterIdPrefix, filterHand,
filterControllerIndex) {
var controller;
var i;
var matchingControllerOccurence = 0;
var targetControllerMatch = filterControllerIndex || 0;
for (i = 0; i < controllers.length; i++) {
controller = controllers[i];
// Determine if the controller ID matches our criteria.
if (filterIdPrefix && !controller.id.startsWith(filterIdPrefix)) {
continue;
}
if (!filterIdPrefix && controller.id !== filterIdExact) { continue; }
// If the hand filter and controller handedness are defined we compare them.
if (filterHand && controller.hand && filterHand !== controller.hand) { continue; }
// If we have detected an unhanded controller and the component was asking
// for a particular hand, we need to treat the controllers in the array as
// pairs of controllers. This effectively means that we need to skip
// NUM_HANDS matches for each controller number, instead of 1.
if (filterHand && !controller.hand) {
targetControllerMatch = NUM_HANDS * filterControllerIndex + ((filterHand === DEFAULT_HANDEDNESS) ? 0 : 1);
}
// We are looking for the nth occurence of a matching controller
// (n equals targetControllerMatch).
if (matchingControllerOccurence === targetControllerMatch) { return controller; }
++matchingControllerOccurence;
}
return undefined;
}
module.exports.findMatchingController = findMatchingController;
/**
 * Emit specific `moved` event(s) if axes changed based on original axismoved event.
 *
 * @param {object} component - Controller component in use.
 * @param {array} axesMapping - For example `{thumbstick: [0, 1]}`.
 * @param {object} evt - Event to process.
 */
module.exports.emitIfAxesChanged = function (component, axesMapping, evt) {
  var axes;
  var buttonType;
  var changed;
  var detail;
  var j;
  for (buttonType in axesMapping) {
    axes = axesMapping[buttonType];

    // Did any of the axes mapped to this control change in this event?
    changed = false;
    for (j = 0; j < axes.length; j++) {
      if (evt.detail.changed[axes[j]]) { changed = true; }
    }

    if (!changed) { continue; }

    // Axis has changed. Emit the specific moved event with axis values in detail.
    // Values are keyed x/y/z/w in mapping order (AXIS_LABELS).
    detail = {};
    for (j = 0; j < axes.length; j++) {
      detail[AXIS_LABELS[j]] = evt.detail.axis[axes[j]];
    }
    component.el.emit(buttonType + 'moved', detail);
  }
};
/**
* Handle a button event and reemits the events.
*
* @param {string} id - id of the button.
* @param {string} evtName - name of the reemitted event
* @param {object} component - reference to the component
* @param {string} hand - handedness of the controller: left or right.
*/
module.exports.onButtonEvent = function (id, evtName, component, hand) {
var mapping = hand ? component.mapping[hand] : component.mapping;
var buttonName = mapping.buttons[id];
component.el.emit(buttonName + evtName);
if (component.updateModel) {
component.updateModel(buttonName, evtName);
}
};
| RSpace/aframe | src/utils/tracked-controls.js | JavaScript | mit | 6,862 |
// Flow type test for the pretty-bytes libdef: the call with a number must
// typecheck as returning string, and each line under `$ExpectError` must FAIL
// Flow checking (non-number arguments are rejected by the signature).
const prettyBytes = require("pretty-bytes");
(prettyBytes(123): string);
// $ExpectError
prettyBytes("123");
// $ExpectError
prettyBytes(true);
| mwalkerwells/flow-typed | definitions/npm/pretty-bytes_v4.x.x/test_pretty-bytes_v4.x.x.js | JavaScript | mit | 147 |
/*
-----------------------------------------------------------------------------
This source file is part of OGRE
(Object-oriented Graphics Rendering Engine)
For the latest info, see http://www.ogre3d.org/
Copyright (c) 2000-2011 Torus Knot Software Ltd
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
-----------------------------------------------------------------------------
*/
#include "OgrePlatform.h"
#if OGRE_PLATFORM == OGRE_PLATFORM_SYMBIAN
#include <coecntrl.h>
#endif
#include "SampleBrowser.h"
#if OGRE_PLATFORM == OGRE_PLATFORM_WIN32
#define WIN32_LEAN_AND_MEAN
#include "windows.h"
#include "OgreString.h"
#elif OGRE_PLATFORM == OGRE_PLATFORM_APPLE
#include "SampleBrowser_OSX.h"
#elif OGRE_PLATFORM == OGRE_PLATFORM_APPLE_IOS
#include "SampleBrowser_iOS.h"
#elif OGRE_PLATFORM == OGRE_PLATFORM_NACL
#include "SampleBrowser_NaCl.h"
#endif
// Desktop/mobile entry point for the OGRE sample browser. Symbian and NaCl
// supply their own entry points elsewhere, so the whole block is compiled out
// for those platforms.
#if OGRE_PLATFORM != OGRE_PLATFORM_SYMBIAN && OGRE_PLATFORM != OGRE_PLATFORM_NACL
#if OGRE_PLATFORM == OGRE_PLATFORM_WIN32
INT WINAPI WinMain(HINSTANCE, HINSTANCE, LPSTR cmdLine, INT)
#else
int main(int argc, char *argv[])
#endif
{
#if OGRE_PLATFORM == OGRE_PLATFORM_APPLE_IOS
	// iOS: hand control to UIKit; the AppDelegate drives the sample browser.
	NSAutoreleasePool * pool = [[NSAutoreleasePool alloc] init];
	int retVal = UIApplicationMain(argc, argv, @"UIApplication", @"AppDelegate");
	[pool release];
	return retVal;
#elif (OGRE_PLATFORM == OGRE_PLATFORM_APPLE) && __LP64__
	// 64-bit OS X: run through NSApplication with the Cocoa app delegate.
	NSAutoreleasePool * pool = [[NSAutoreleasePool alloc] init];
	mAppDelegate = [[AppDelegate alloc] init];
	[[NSApplication sharedApplication] setDelegate:mAppDelegate];
	int retVal = NSApplicationMain(argc, (const char **) argv);
	[pool release];
	return retVal;
#else
	try
	{
		// "nograb" keeps the sample browser from confining the mouse
		// pointer to the window -- useful when debugging.
		bool nograb = false;
#if OGRE_PLATFORM != OGRE_PLATFORM_WIN32
		if (argc >= 2 && Ogre::String(argv[1]) == "nograb")
			nograb = true;
#else
		// somewhat hacky, but much simpler than other solutions
		// (WinMain gets a single raw command-line string, not argv)
		if (Ogre::String(cmdLine).find("nograb") != Ogre::String::npos)
			nograb = true;
#endif
		OgreBites::SampleBrowser sb (nograb);
		sb.go();
	}
	catch (Ogre::Exception& e)
	{
		// Report startup/runtime failures; a message box on Windows,
		// stderr everywhere else.
#if OGRE_PLATFORM == OGRE_PLATFORM_WIN32
		MessageBoxA(NULL, e.getFullDescription().c_str(), "An exception has occurred!", MB_ICONERROR | MB_TASKMODAL);
#else
		std::cerr << "An exception has occurred: " << e.getFullDescription().c_str() << std::endl;
#endif
	}
#endif
	return 0;
}
#endif // OGRE_PLATFORM != OGRE_PLATFORM_SYMBIAN
| bhlzlx/ogre | Samples/Browser/src/SampleBrowser.cpp | C++ | mit | 3,418 |
<?php
/**
* PHPExcel
*
* Copyright (c) 2006 - 2008 PHPExcel
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*
* @category PHPExcel
* @package PHPExcel_Shared
* @copyright Copyright (c) 2006 - 2008 PHPExcel (http://www.codeplex.com/PHPExcel)
* @license http://www.gnu.org/licenses/old-licenses/lgpl-2.1.txt LGPL
* @version ##VERSION##, ##DATE##
*/
/** PHPExcel_Cell */
require_once 'PHPExcel/Cell.php';
/** PHPExcel_Style_NumberFormat */
require_once 'PHPExcel/Style/NumberFormat.php';
/**
 * PHPExcel_Shared_Date
 *
 * Helpers for converting between Excel date/time serial values and PHP
 * timestamps / DateTime objects, supporting both the Windows 1900 and the
 * Mac 1904 Excel base calendars.
 *
 * @category   PHPExcel
 * @package    PHPExcel_Shared
 * @copyright  Copyright (c) 2006 - 2008 PHPExcel (http://www.codeplex.com/PHPExcel)
 */
class PHPExcel_Shared_Date
{
	/** constants */
	const CALENDAR_WINDOWS_1900 = 1900;	//	Base date of 1st Jan 1900 = 1.0
	const CALENDAR_MAC_1904 = 1904;		//	Base date of 2nd Jan 1904 = 1.0

	// Active base calendar for all conversions in this class.
	private static $ExcelBaseDate = self::CALENDAR_WINDOWS_1900;
	public static $dateTimeObjectType = 'DateTime';

	/**
	 * Set the Excel calendar (Windows 1900 or Mac 1904)
	 *
	 * @param	 integer	$baseDate			Excel base date
	 * @return	 boolean						Success or failure
	 */
	public static function setExcelCalendar($baseDate) {
		if (($baseDate == self::CALENDAR_WINDOWS_1900) ||
			($baseDate == self::CALENDAR_MAC_1904)) {
			self::$ExcelBaseDate = $baseDate;
			return True;
		}
		return False;
	}	//	function setExcelCalendar()

	/**
	 * Return the Excel calendar (Windows 1900 or Mac 1904)
	 *
	 * @return	 integer	$baseDate			Excel base date
	 */
	public static function getExcelCalendar() {
		return self::$ExcelBaseDate;
	}	//	function getExcelCalendar()

	/**
	 * Convert a date from Excel to PHP
	 *
	 * @param	 long	 $dateValue		Excel date/time value
	 * @return	 long					PHP serialized date/time
	 */
	public static function ExcelToPHP($dateValue = 0) {
		if (self::$ExcelBaseDate == self::CALENDAR_WINDOWS_1900) {
			$myExcelBaseDate = 25569;
			//	Adjust for the spurious 29-Feb-1900 (Day 60)
			if ($dateValue < 60) {
				--$myExcelBaseDate;
			}
		} else {
			$myExcelBaseDate = 24107;
		}

		// Perform conversion
		if ($dateValue >= 1) {
			$utcDays = $dateValue - $myExcelBaseDate;
			$returnValue = round($utcDays * 24 * 60 * 60);
		} else {
			// Fractional serial value: a time-of-day only. mktime() is called
			// without date arguments here, so PHP fills in the current date
			// -- presumably intentional for time-only values; TODO confirm.
			$hours = round($dateValue * 24);
			$mins = round($dateValue * 24 * 60) - round($hours * 60);
			$secs = round($dateValue * 24 * 60 * 60) - round($hours * 60 * 60) - round($mins * 60);
			$returnValue = mktime($hours, $mins, $secs);
		}

		// Return
		return $returnValue;
	}	//	function ExcelToPHP()

	/**
	 * Convert a date from Excel to a PHP Date/Time object
	 *
	 * @param	 long	 $dateValue		Excel date/time value
	 * @return	 long					PHP date/time object
	 */
	public static function ExcelToPHPObject($dateValue = 0) {
		$dateTime = self::ExcelToPHP($dateValue);
		$days = floor($dateTime / 86400);
		$time = round((($dateTime / 86400) - $days) * 86400);
		$hours = round($time / 3600);
		$minutes = round($time / 60) - ($hours * 60);
		$seconds = round($time) - ($hours * 3600) - ($minutes * 60);
		$dateObj = date_create('1-Jan-1970+'.$days.' days');
		$dateObj->setTime($hours,$minutes,$seconds);
		return $dateObj;
	}	//	function ExcelToPHPObject()

	/**
	 * Convert a date from PHP to Excel
	 *
	 * @param	 mixed		$dateValue	PHP serialized date/time or date object
	 * @return	 mixed					Excel date/time value
	 *										or boolean False on failure
	 */
	public static function PHPToExcel($dateValue = 0) {
		// Work in UTC so local timezone offsets do not shift the serial value.
		$saveTimeZone = date_default_timezone_get();
		date_default_timezone_set('UTC');
		$retValue = False;
		if ((is_object($dateValue)) && ($dateValue instanceof self::$dateTimeObjectType)) {
			$retValue = self::FormattedPHPToExcel( $dateValue->format('Y'), $dateValue->format('m'), $dateValue->format('d'),
												   $dateValue->format('H'), $dateValue->format('i'), $dateValue->format('s')
												 );
		} elseif (is_numeric($dateValue)) {
			$retValue = self::FormattedPHPToExcel( date('Y',$dateValue), date('m',$dateValue), date('d',$dateValue),
												   date('H',$dateValue), date('i',$dateValue), date('s',$dateValue)
												 );
		}
		date_default_timezone_set($saveTimeZone);

		return $retValue;
	}	//	function PHPToExcel()

	/**
	 * FormattedPHPToExcel
	 *
	 * @param	long	$year
	 * @param	long	$month
	 * @param	long	$day
	 * @param	long	$hours
	 * @param	long	$minutes
	 * @param	long	$seconds
	 * @return	long				Excel date/time value
	 */
	public static function FormattedPHPToExcel($year, $month, $day, $hours=0, $minutes=0, $seconds=0) {
		if (self::$ExcelBaseDate == self::CALENDAR_WINDOWS_1900) {
			//
			//	Fudge factor for the erroneous fact that the year 1900 is treated as a Leap Year in MS Excel
			//	This affects every date following 28th February 1900
			//
			$excel1900isLeapYear = True;
			if (($year == 1900) && ($month <= 2)) { $excel1900isLeapYear = False; }
			$myExcelBaseDate = 2415020;
		} else {
			$myExcelBaseDate = 2416481;
			$excel1900isLeapYear = False;
		}

		//	Julian base date Adjustment
		if ($month > 2) {
			$month = $month - 3;
		} else {
			$month = $month + 9;
			--$year;
		}

		//	Calculate the Julian Date, then subtract the Excel base date (JD 2415020 = 31-Dec-1899 Giving Excel Date of 0)
		$century = substr($year,0,2);
		$decade = substr($year,2,2);
		$excelDate = floor((146097 * $century) / 4) + floor((1461 * $decade) / 4) + floor((153 * $month + 2) / 5) + $day + 1721119 - $myExcelBaseDate + $excel1900isLeapYear;

		$excelTime = (($hours * 3600) + ($minutes * 60) + $seconds) / 86400;

		return $excelDate + $excelTime;
	}	//	function FormattedPHPToExcel()

	/**
	 * Is a given cell a date/time?
	 *
	 * @param	 PHPExcel_Cell	$pCell
	 * @return	 boolean
	 */
	public static function isDateTime(PHPExcel_Cell $pCell) {
		return self::isDateTimeFormat($pCell->getParent()->getStyle($pCell->getCoordinate())->getNumberFormat());
	}	//	function isDateTime()

	/**
	 * Is a given number format a date/time?
	 *
	 * @param	 PHPExcel_Style_NumberFormat	$pFormat
	 * @return	 boolean
	 */
	public static function isDateTimeFormat(PHPExcel_Style_NumberFormat $pFormat) {
		return self::isDateTimeFormatCode($pFormat->getFormatCode());
	}	//	function isDateTimeFormat()

	// Characters whose presence in a custom format code marks it as date/time.
	private static $possibleCharacters = array('y', 'm', 'd', 'H', 'i', 's');

	/**
	 * Is a given number format code a date/time?
	 *
	 * @param	 string	$pFormatCode
	 * @return	 boolean
	 */
	public static function isDateTimeFormatCode($pFormatCode = '') {
		// Switch on formatcode
		switch ($pFormatCode) {
			case PHPExcel_Style_NumberFormat::FORMAT_DATE_YYYYMMDD:
			case PHPExcel_Style_NumberFormat::FORMAT_DATE_DDMMYYYY:
			case PHPExcel_Style_NumberFormat::FORMAT_DATE_DMYSLASH:
			case PHPExcel_Style_NumberFormat::FORMAT_DATE_DMYMINUS:
			case PHPExcel_Style_NumberFormat::FORMAT_DATE_DMMINUS:
			case PHPExcel_Style_NumberFormat::FORMAT_DATE_MYMINUS:
			case PHPExcel_Style_NumberFormat::FORMAT_DATE_DATETIME:
			case PHPExcel_Style_NumberFormat::FORMAT_DATE_TIME1:
			case PHPExcel_Style_NumberFormat::FORMAT_DATE_TIME2:
			case PHPExcel_Style_NumberFormat::FORMAT_DATE_TIME3:
			case PHPExcel_Style_NumberFormat::FORMAT_DATE_TIME4:
			case PHPExcel_Style_NumberFormat::FORMAT_DATE_TIME5:
			case PHPExcel_Style_NumberFormat::FORMAT_DATE_TIME6:
			case PHPExcel_Style_NumberFormat::FORMAT_DATE_TIME7:
			case PHPExcel_Style_NumberFormat::FORMAT_DATE_TIME8:
			case PHPExcel_Style_NumberFormat::FORMAT_DATE_YYYYMMDDSLASH:
				return true;
		}

		// Try checking all possible characters.
		// Note: this previously used eregi(), which was deprecated in PHP 5.3
		// and removed in PHP 7.0. The patterns are single letters with no
		// regex metacharacters, so a case-insensitive substring test is
		// exactly equivalent.
		foreach (self::$possibleCharacters as $possibleCharacter) {
			if (stripos($pFormatCode, $possibleCharacter) !== false) {
				return true;
			}
		}

		// No date...
		return false;
	}	//	function isDateTimeFormatCode()
}
| ALTELMA/OfficeEquipmentManager | application/libraries/PHPExcel/branches/v1.6.3/Classes/PHPExcel/Shared/Date.php | PHP | mit | 8,411 |
# Local development settings: pull in the shared settings and point Django at
# a local MySQL database (root user, empty password, default host/port).
from settings.common import *

DATABASES = {
    'default': {
        "ENGINE": "django.db.backends.mysql",
        "NAME": "mhfowler",
        "USER": "root",
        "PASSWORD": "",
        "HOST": "localhost",
        "PORT": ""
    }
}
import React from 'react'
import UiValidate from '../../../../components/forms/validation/UiValidate'
import MaskedInput from '../../../../components/forms/inputs/MaskedInput'
import UiDatepicker from '../../../../components/forms/inputs/UiDatepicker'
// jQuery-validate style configuration consumed by <UiValidate>: field rules
// plus the user-facing message shown when each rule fails.
const validationOptions = {
  // Rules for form validation
  rules: {
    name: {
      required: true
    },
    email: {
      required: true,
      email: true
    },
    review: {
      required: true,
      minlength: 20
    },
    quality: {
      required: true
    },
    reliability: {
      required: true
    },
    overall: {
      required: true
    }
  },

  // Messages for form validation (HTML is allowed, see the email message)
  messages: {
    name: {
      required: 'Please enter your name'
    },
    email: {
      required: 'Please enter your email address',
      email: '<i class="fa fa-warning"></i><strong>Please enter a VALID email addres</strong>'
    },
    review: {
      required: 'Please enter your review'
    },
    quality: {
      required: 'Please rate quality of the product'
    },
    reliability: {
      required: 'Please rate reliability of the product'
    },
    overall: {
      required: 'Please rate the product'
    }
  }
};
// Product review form demo: name/email/text inputs plus three star-rating
// groups, validated client-side via the UiValidate wrapper above.
export default class ReviewForm extends React.Component {
  // Demo submit handler; UiValidate performs the validation, this only
  // suppresses the browser's default form submission.
  onSubmit(e) {
    e.preventDefault();
    console.log('submit stuff')
  }

  render() {
    return (
      <UiValidate options={validationOptions}>
        <form id="review-form" className="smart-form" noValidate="novalidate" onSubmit={this.onSubmit}>
          <header>
            Review form
          </header>
          <fieldset>
            <section>
              <label className="input"> <i className="icon-append fa fa-user"/>
                <input type="text" name="name" id="name" placeholder="Your name"/>
              </label>
            </section>
            <section>
              <label className="input"> <i className="icon-append fa fa-envelope-o"/>
                <input type="email" name="email" id="email" placeholder="Your e-mail"/>
              </label>
            </section>
            <section>
              <label className="label"/>
              <label className="textarea"> <i className="icon-append fa fa-comment"/>
                <textarea rows="3" name="review" id="review" placeholder="Text of the review"/>
              </label>
            </section>
            {/* Star ratings: inputs are ordered 5..1 so the CSS sibling
                selectors can highlight stars up to the hovered/checked one. */}
            <section>
              <div className="rating">
                <input type="radio" name="quality" id="quality-5"/>
                <label htmlFor="quality-5"><i className="fa fa-star"/></label>
                <input type="radio" name="quality" id="quality-4"/>
                <label htmlFor="quality-4"><i className="fa fa-star"/></label>
                <input type="radio" name="quality" id="quality-3"/>
                <label htmlFor="quality-3"><i className="fa fa-star"/></label>
                <input type="radio" name="quality" id="quality-2"/>
                <label htmlFor="quality-2"><i className="fa fa-star"/></label>
                <input type="radio" name="quality" id="quality-1"/>
                <label htmlFor="quality-1"><i className="fa fa-star"/></label>
                Quality of the product
              </div>
              <div className="rating">
                <input type="radio" name="reliability" id="reliability-5"/>
                <label htmlFor="reliability-5"><i className="fa fa-star"/></label>
                <input type="radio" name="reliability" id="reliability-4"/>
                <label htmlFor="reliability-4"><i className="fa fa-star"/></label>
                <input type="radio" name="reliability" id="reliability-3"/>
                <label htmlFor="reliability-3"><i className="fa fa-star"/></label>
                <input type="radio" name="reliability" id="reliability-2"/>
                <label htmlFor="reliability-2"><i className="fa fa-star"/></label>
                <input type="radio" name="reliability" id="reliability-1"/>
                <label htmlFor="reliability-1"><i className="fa fa-star"/></label>
                Reliability of the product
              </div>
              <div className="rating">
                <input type="radio" name="overall" id="overall-5"/>
                <label htmlFor="overall-5"><i className="fa fa-star"/></label>
                <input type="radio" name="overall" id="overall-4"/>
                <label htmlFor="overall-4"><i className="fa fa-star"/></label>
                <input type="radio" name="overall" id="overall-3"/>
                <label htmlFor="overall-3"><i className="fa fa-star"/></label>
                <input type="radio" name="overall" id="overall-2"/>
                <label htmlFor="overall-2"><i className="fa fa-star"/></label>
                <input type="radio" name="overall" id="overall-1"/>
                <label htmlFor="overall-1"><i className="fa fa-star"/></label>
                Overall rating
              </div>
            </section>
          </fieldset>
          <footer>
            <button type="submit" className="btn btn-primary">
              Validate Form
            </button>
          </footer>
        </form>
      </UiValidate>
    )
  }
}
<?php
/*
* This file is part of the Sylius package.
*
* (c) Paweł Jędrzejewski
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
declare(strict_types=1);
namespace Sylius\Bundle\AdminBundle\EmailManager;
use Sylius\Bundle\CoreBundle\Mailer\Emails;
use Sylius\Component\Core\Model\OrderInterface;
use Sylius\Component\Mailer\Sender\SenderInterface;
final class OrderEmailManager implements OrderEmailManagerInterface
{
    public function __construct(private SenderInterface $emailSender)
    {
    }

    /**
     * Re-send the order confirmation email to the order's customer, rendered
     * for the order's channel and locale.
     */
    public function sendConfirmationEmail(OrderInterface $order): void
    {
        $recipients = [$order->getCustomer()->getEmail()];
        $templateData = [
            'order' => $order,
            'channel' => $order->getChannel(),
            'localeCode' => $order->getLocaleCode(),
        ];

        $this->emailSender->send(Emails::ORDER_CONFIRMATION_RESENT, $recipients, $templateData);
    }
}
| SyliusBot/Sylius | src/Sylius/Bundle/AdminBundle/EmailManager/OrderEmailManager.php | PHP | mit | 992 |
#!/usr/bin/env python3
#
# Copyright (c) 2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Lint format strings: This program checks that the number of arguments passed
# to a variadic format string function matches the number of format specifiers
# in the format string.
import argparse
import re
import sys
# (file path, exact normalized call string) pairs that the linter would flag
# but which are known to be fine; matching calls are skipped in main().
FALSE_POSITIVES = [
    ("src/batchedlogger.h", "strprintf(fmt, args...)"),
    ("src/dbwrapper.cpp", "vsnprintf(p, limit - p, format, backup_ap)"),
    ("src/index/base.cpp", "FatalError(const char* fmt, const Args&... args)"),
    ("src/netbase.cpp", "LogConnectFailure(bool manual_connection, const char* fmt, const Args&... args)"),
    ("src/qt/networkstyle.cpp", "strprintf(appName, gArgs.GetDevNetName())"),
    ("src/qt/networkstyle.cpp", "strprintf(titleAddText, gArgs.GetDevNetName())"),
    ("src/rpc/rpcevo.cpp", "strprintf(it->second, nParamNum)"),
    ("src/stacktraces.cpp", "strprintf(fmtStr, i, si.pc, lstr, fstr)"),
    ("src/statsd_client.cpp", "snprintf(d->errmsg, sizeof(d->errmsg), \"could not create socket, err=%m\")"),
    ("src/statsd_client.cpp", "snprintf(d->errmsg, sizeof(d->errmsg), \"sendto server fail, host=%s:%d, err=%m\", d->host.c_str(), d->port)"),
    ("src/util.cpp", "strprintf(_(COPYRIGHT_HOLDERS), _(COPYRIGHT_HOLDERS_SUBSTITUTION))"),
    ("src/util.cpp", "strprintf(COPYRIGHT_HOLDERS, COPYRIGHT_HOLDERS_SUBSTITUTION)"),
    ("src/wallet/wallet.h", "WalletLogPrintf(std::string fmt, Params... parameters)"),
    ("src/wallet/wallet.h", "LogPrintf((\"%s \" + fmt).c_str(), GetDisplayName(), parameters...)"),
]
def parse_function_calls(function_name, source_code):
    """Return an array with all calls to function function_name in string source_code.
    Preprocessor directives and C++ style comments ("//") in source_code are removed.

    >>> len(parse_function_calls("foo", "foo();bar();foo();bar();"))
    2
    >>> parse_function_calls("foo", "foo(1);bar(1);foo(2);bar(2);")[0].startswith("foo(1);")
    True
    >>> parse_function_calls("foo", "foo(1);bar(1);foo(2);bar(2);")[1].startswith("foo(2);")
    True
    >>> len(parse_function_calls("foo", "foo();bar();// foo();bar();"))
    1
    >>> len(parse_function_calls("foo", "#define FOO foo();"))
    0
    """
    assert(type(function_name) is str and type(source_code) is str and function_name)
    # Drop preprocessor lines entirely, blank out "// ..." comments, and
    # flatten everything to a single line so calls may span line breaks.
    cleaned_lines = []
    for line in source_code.split("\n"):
        if line.strip().startswith("#"):
            continue
        cleaned_lines.append(re.sub("// .*", " ", line).strip())
    flattened = " " + " ".join(cleaned_lines)
    # A call site is the function name preceded by a non-identifier character;
    # the lookahead group captures from the name through the rest of the line.
    pattern = r"[^a-zA-Z_](?=({}\(.*).*)".format(function_name)
    return re.findall(pattern, flattened)
def normalize(s):
    """Return a normalized version of string s with newlines, tabs and C style comments ("/* ... */")
    replaced with spaces. Multiple spaces are replaced with a single space.

    >>> normalize("   /* nothing */   foo\tfoo /* bar */ foo     ")
    'foo foo foo'
    """
    assert(type(s) is str)
    s = s.replace("\n", " ")
    s = s.replace("\t", " ")
    # Raw strings for the regex patterns: "/\*" inside a plain string literal
    # is an invalid escape sequence (DeprecationWarning, and a SyntaxWarning
    # on modern Python), even though it happens to produce the same pattern.
    s = re.sub(r"/\*.*?\*/", " ", s)
    s = re.sub(r" {2,}", " ", s)
    return s.strip()
# Two-character escape sequences and the placeholder each one is mapped to by
# escape() (and mapped back from by unescape()).
ESCAPE_MAP = {
    r"\n": "[escaped-newline]",
    r"\t": "[escaped-tab]",
    r'\"': "[escaped-quote]",
}
def escape(s):
    """Return string s with the two-character sequences \\n, \\t and \\"
    replaced by the placeholders "[escaped-newline]", "[escaped-tab]" and
    "[escaped-quote]" as listed in ESCAPE_MAP.

    >>> unescape(escape("foo")) == "foo"
    True
    """
    assert(type(s) is str)
    result = s
    for raw_sequence, placeholder in ESCAPE_MAP.items():
        result = result.replace(raw_sequence, placeholder)
    return result
def unescape(s):
    """Return the unescaped version of escaped string s.
    Reverses the replacements made in function escape(s).

    >>> unescape(escape("bar"))
    'bar'
    """
    assert(type(s) is str)
    result = s
    for raw_sequence, placeholder in ESCAPE_MAP.items():
        result = result.replace(placeholder, raw_sequence)
    return result
def parse_function_call_and_arguments(function_name, function_call):
    """Split string function_call into an array of strings consisting of:
      * the string function_name followed by "("
      * the function call argument #1
      * ...
      * the function call argument #n
      * a trailing ")"
    The strings returned are in escaped form. See escape(...).

    >>> parse_function_call_and_arguments("foo", 'foo("%s", "foo");')
    ['foo(', '"%s",', ' "foo"', ')']
    >>> parse_function_call_and_arguments("foo", 'foo("%s %s", "foo", "bar");')
    ['foo(', '"%s %s",', ' "foo",', ' "bar"', ')']
    >>> parse_function_call_and_arguments("foo", 'foo(bar(foobar(barfoo("foo"))), foobar); barfoo')
    ['foo(', 'bar(foobar(barfoo("foo"))),', ' foobar', ')']
    >>> parse_function_call_and_arguments("foo", "foo()")
    ['foo(', '', ')']
    >>> parse_function_call_and_arguments("foo", "foo(123)")
    ['foo(', '123', ')']
    """
    assert(type(function_name) is str and type(function_call) is str and function_name)
    rest = normalize(escape(function_call))
    call_prefix = "{}(".format(function_name)
    assert(rest.startswith(call_prefix))
    segments = [call_prefix]
    rest = rest[len(call_prefix):]
    depth = 1           # parentheses currently open; the call's own "(" counts
    inside_string = False
    segments.append("")
    for ch in rest:
        # Every character is appended to the current segment first; the
        # closing ")" of the call itself is stripped back off below.
        segments[-1] += ch
        if ch == "\"":
            inside_string = not inside_string
            continue
        if inside_string:
            continue
        if ch == "(":
            depth += 1
            continue
        if ch == ")":
            depth -= 1
        if depth > 1:
            # Inside a nested call: commas here do not separate arguments.
            continue
        if depth == 0:
            # Closing parenthesis of the outer call: remove it from the last
            # argument and emit it as its own trailing element.
            segments[-1] = segments[-1][:-1]
            segments.append(ch)
            break
        if ch == ",":
            segments.append("")
    return segments
def parse_string_content(argument):
    """Return the text within quotes in string argument; adjacent quoted
    pieces are concatenated, and everything outside quotes is dropped.

    >>> parse_string_content('1 "foo %d bar" 2')
    'foo %d bar'
    >>> parse_string_content('1 foobar 2')
    ''
    >>> parse_string_content('1 "foo" 2 " " "bar" 3')
    'foo bar'
    >>> parse_string_content('""')
    ''
    >>> parse_string_content('')
    ''
    """
    assert(type(argument) is str)
    pieces = []
    inside_string = False
    for ch in normalize(escape(argument)):
        if ch == "\"":
            inside_string = not inside_string
        elif inside_string:
            pieces.append(ch)
    return "".join(pieces)
def count_format_specifiers(format_string):
    """Return the number of format specifiers in string format_string.
    A literal "%%" does not count; a "*" width/precision inside a specifier
    counts as an extra argument.

    >>> count_format_specifiers("foo bar foo")
    0
    >>> count_format_specifiers("foo %d bar %i foo %% foo")
    2
    >>> count_format_specifiers("foo %d bar %i foo %% foo %*d foo")
    4
    """
    assert(type(format_string) is str)
    count = 0
    inside_specifier = False
    for i, ch in enumerate(format_string):
        if format_string[i - 1:i + 1] == "%%" or format_string[i:i + 2] == "%%":
            # Either half of an escaped "%%": not a specifier.
            pass
        elif ch == "%":
            inside_specifier = True
            count += 1
        elif ch in "aAcdeEfFgGinopsuxX":
            # A conversion character ends the current specifier.
            inside_specifier = False
        elif inside_specifier and ch == "*":
            # Dynamic width/precision consumes one extra argument.
            count += 1
    return count
def main():
    """Entry point: lint every file named on the command line and exit with
    status 1 if any call to the target function has a mismatch between format
    specifiers and arguments (or could not be parsed)."""
    parser = argparse.ArgumentParser(description="This program checks that the number of arguments passed "
                                                 "to a variadic format string function matches the number of format "
                                                 "specifiers in the format string.")
    parser.add_argument("--skip-arguments", type=int, help="number of arguments before the format string "
                                                           "argument (e.g. 1 in the case of fprintf)", default=0)
    parser.add_argument("function_name", help="function name (e.g. fprintf)", default=None)
    parser.add_argument("file", nargs="*", help="C++ source code file (e.g. foo.cpp)")
    args = parser.parse_args()
    exit_code = 0
    for filename in args.file:
        with open(filename, "r", encoding="utf-8") as f:
            for function_call_str in parse_function_calls(args.function_name, f.read()):
                parts = parse_function_call_and_arguments(args.function_name, function_call_str)
                # Unescape and truncate for display and for matching against
                # the FALSE_POSITIVES allow-list.
                relevant_function_call_str = unescape("".join(parts))[:512]
                if (f.name, relevant_function_call_str) in FALSE_POSITIVES:
                    continue
                # parts is [name + "(", arg1, ..., argN, ")"], so fewer than
                # 3 + skipped elements means the call could not be parsed.
                if len(parts) < 3 + args.skip_arguments:
                    exit_code = 1
                    print("{}: Could not parse function call string \"{}(...)\": {}".format(f.name, args.function_name, relevant_function_call_str))
                    continue
                argument_count = len(parts) - 3 - args.skip_arguments
                format_str = parse_string_content(parts[1 + args.skip_arguments])
                format_specifier_count = count_format_specifiers(format_str)
                if format_specifier_count != argument_count:
                    exit_code = 1
                    print("{}: Expected {} argument(s) after format string but found {} argument(s): {}".format(f.name, format_specifier_count, argument_count, relevant_function_call_str))
                    continue
    sys.exit(exit_code)
if __name__ == "__main__":
main()
| dashpay/dash | test/lint/lint-format-strings.py | Python | mit | 10,365 |
using System;
namespace Starscream.Web.Api.Responses
{
public class SuccessfulLoginResponse<T>
{
public SuccessfulLoginResponse()
{
}
public SuccessfulLoginResponse(T token, string name, DateTime expires, string[] claims)
{
Token = token;
Name = name;
Expires = expires;
Claims = claims;
}
public T Token { get; set; }
public string Name { get; set; }
public DateTime Expires { get; set; }
public string[] Claims { get; set; }
}
} | AcklenAvenue/Starscream | src/Starscream.Web/Api/Responses/SuccessfulLoginResponse.cs | C# | mit | 574 |
package ij.plugin;
import ij.*;
import ij.gui.GenericDialog;
import ij.process.*;
import ij.measure.Calibration;
/** This plugin implements the Image/Stacks/Tools/Grouped Z Project command. */
public class GroupedZProjector implements PlugIn {
private static int method = ZProjector.AVG_METHOD;
private int groupSize;
public void run(String arg) {
ImagePlus imp = IJ.getImage();
int size = imp.getStackSize();
if (size==1) {
IJ.error("Z Project", "This command requires a stack");
return;
}
if (imp.isHyperStack()) {
new ZProjector().run("");
return;
}
if (!showDialog(imp))
return;
ImagePlus imp2 = groupZProject(imp, method, groupSize);
imp2.setCalibration(imp.getCalibration());
Calibration cal = imp2.getCalibration();
cal.pixelDepth *= groupSize;
if (imp!=null)
imp2.show();
}
public ImagePlus groupZProject(ImagePlus imp, int method, int groupSize) {
if (method<0 || method>=ZProjector.METHODS.length)
return null;
imp.setDimensions(1, groupSize, imp.getStackSize()/groupSize);
ZProjector zp = new ZProjector(imp);
zp.setMethod(method);
zp.setStartSlice(1);
zp.setStopSlice(groupSize);
zp.doHyperStackProjection(true);
return zp.getProjection();
}
boolean showDialog(ImagePlus imp) {
int size = imp.getStackSize();
GenericDialog gd = new GenericDialog("Z Project");
gd.addChoice("Projection method:", ZProjector.METHODS, ZProjector.METHODS[method]);
gd.addNumericField("Group size:", size, 0);
String factors = "Valid factors: ";
int i = 1, count = 0;
while (i <= size && count<10) {
if (size % i == 0) {
count++; factors += " "+ i +",";
}
i++;
}
gd.setInsets(10,0,0);
gd.addMessage(factors+"...");
gd.showDialog();
if (gd.wasCanceled())
return false;
method = gd.getNextChoiceIndex();
groupSize = (int)gd.getNextNumber();
if (groupSize<1 || groupSize>size || (size%groupSize)!=0) {
IJ.error("ZProject", "Group size must divide evenly into the stack size.");
return false;
}
return true;
}
} | steliann/objectj | src/ij/plugin/GroupedZProjector.java | Java | mit | 2,033 |
import type { NextPage } from 'next'
import Link from 'next/link'
import Layout from '../components/Layout'
// Static "About" page; the shared <Layout> supplies the page chrome and
// <title>, and the Link navigates back to the index route.
const AboutPage: NextPage = () => (
  <Layout title="About | Next.js + Temporal Example">
    <h1>About</h1>
    <p>This is the about page</p>
    <p>
      <Link href="/">
        <a>Go home</a>
      </Link>
    </p>
  </Layout>
)

export default AboutPage
| zeit/next.js | examples/with-temporal/pages/about.tsx | TypeScript | mit | 368 |
# CRUD controller for a user's todo lists, plus an action to email a list.
# All queries are scoped to current_user, so users can only act on their own
# lists.
class TodoListsController < ApplicationController
  before_action :require_user
  before_action :set_todo_list, only: %i[edit update destroy email]
  before_action :set_back_link, except: %i[index show]

  def index
    @todo_lists = current_user.todo_lists
  end

  def show; end

  def new
    @todo_list = current_user.todo_lists.new
  end

  def edit; end

  # NOTE(review): the :success / :error keys passed to redirect_to rely on
  # custom flash types (add_flash_types) -- confirm they are registered in
  # ApplicationController. Options passed to render (error: ...) are render
  # options, not flash messages; verify those messages actually display.
  def create
    @todo_list = current_user.todo_lists.new(todo_list_params)

    if @todo_list.save
      redirect_to todo_list_todo_items_path(@todo_list), success: 'Todo list was successfully created.'
    else
      render :new, error: 'Todo list could not be created.'
    end
  end

  def update
    if @todo_list.update(todo_list_params)
      redirect_to todo_list_todo_items_path(@todo_list), success: 'Todo list was successfully updated.'
    else
      render :edit, error: 'Todo list could not be updated.'
    end
  end

  def destroy
    @todo_list.destroy
    redirect_to todo_lists_url, success: 'Todo list was successfully deleted.'
  end

  # Emails the list to the address given in params[:destination].
  # NOTE(review): `destination =~ /@/` is a very loose validity check, and
  # deliver_now sends synchronously within the request -- consider stricter
  # validation and deliver_later.
  def email
    destination = params[:destination]
    notifier = Notifier.todo_list(@todo_list, destination)

    if destination =~ /@/ && notifier.deliver_now
      redirect_to todo_list_todo_items_path(@todo_list), success: 'Todo list send.'
    else
      redirect_to todo_list_todo_items_path(@todo_list), error: 'Todo list could not be sent.'
    end
  end

  private

  def set_back_link
    go_back_link_to todo_lists_path
  end

  # Scoped lookup: raises ActiveRecord::RecordNotFound for other users' lists.
  def set_todo_list
    @todo_list = current_user.todo_lists.find(params[:id])
  end

  # Strong parameters: only the title may be mass-assigned.
  def todo_list_params
    params.require(:todo_list).permit(:title)
  end
end
| kirbrown/od-ot | app/controllers/todo_lists_controller.rb | Ruby | mit | 1,619 |
jQuery(document).ready(function ($) {
    // Attach a jQuery UI datepicker (ISO 8601 format, with month and year
    // dropdowns) to the season start-date field.
    var $startDateField = $("#podlove_season_start_date");

    $startDateField.datepicker({
        changeMonth: true,
        changeYear: true,
        dateFormat: $.datepicker.ISO_8601
    });

    // Clicking anywhere in the surrounding container also opens the picker,
    // so the whole row acts as a click target.
    $startDateField.closest("div").on("click", function () {
        $startDateField.datepicker("show");
    });
});
| katrinleinweber/podlove-publisher | lib/modules/seasons/js/admin.js | JavaScript | mit | 311 |
from abc import ABCMeta, abstractmethod
class ConfigParser:
    """Configuration file parser ABC.

    All configuration parsers must implement this interface so lmdo can
    read and validate configuration files through a single contract.
    """
    # NOTE(review): assigning `__metaclass__` in the class body is the
    # Python 2 way of setting a metaclass; Python 3 ignores this attribute,
    # so under Python 3 the @abstractmethod decorators below are NOT
    # enforced (the class can be instantiated). If the project targets
    # Python 3, this should be `class ConfigParser(metaclass=ABCMeta)` --
    # confirm the supported interpreter versions before changing.
    __metaclass__ = ABCMeta
    """
    Config parser interface
    All parsers for configuaration will
    need to comply with this interface
    so lmdo can understand it
    """
    @abstractmethod
    def get(self, *args, **kwargs):
        """Get value from config file"""
        pass

    @abstractmethod
    def validate(self, *args, **kwargs):
        """Validate config file"""
        pass
| liangrog/lmdo | lmdo/config_parser.py | Python | mit | 506 |
<?php
/*
* This file is part of the Elcodi package.
*
* Copyright (c) 2014-2015 Elcodi.com
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*
* Feel free to edit as you please, and have fun.
*
* @author Marc Morera <yuhu@mmoreram.com>
* @author Aldo Chiecchia <zimage@tiscali.it>
* @author Elcodi Team <tech@elcodi.com>
*/
namespace Elcodi\Component\Tax\Factory;
use Elcodi\Component\Core\Factory\Abstracts\AbstractFactory;
use Elcodi\Component\Tax\Entity\Tax;
/**
 * Factory that builds empty, disabled Tax entities with zeroed values.
 */
class TaxFactory extends AbstractFactory
{
    /**
     * Creates an instance of an entity.
     *
     * This method must return always an empty instance
     *
     * @return Tax Empty entity
     */
    public function create()
    {
        $taxClass = $this->getEntityNamespace();

        /**
         * @var Tax $tax
         */
        $tax = new $taxClass();
        $tax->setName('');
        $tax->setDescription('');
        $tax->setValue(0);
        $tax->setEnabled(false);

        return $tax;
    }
}
| shopery/elcodi | src/Elcodi/Component/Tax/Factory/TaxFactory.php | PHP | mit | 1,124 |
/**
* The MIT License (MIT)
*
* Copyright (c) 2014-2016 Yegor Bugayenko
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included
* in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package org.takes.facets.fork;
import java.util.Arrays;
import lombok.ToString;
import org.takes.Take;
import org.takes.tk.TkWrap;
/**
* Take that acts on request with specified methods only.
* <p>The class is immutable and thread-safe.
*
* @author Aleksey Popov (alopen@yandex.ru)
* @version $Id$
* @since 0.16.1
*/
@ToString(callSuper = true)
public class TkMethods extends TkWrap {
    /**
     * Ctor.
     *
     * <p>Wraps the original take in a {@link TkFork} guarded by a single
     * {@link FkMethods} fork, so the wrapped take is dispatched only for
     * requests whose HTTP method is one of those given (see class
     * description).
     *
     * @param take Original take
     * @param methods HTTP methods (e.g. {@code "GET"}) the take should act on
     */
    public TkMethods(final Take take, final String ...methods) {
        super(
            new TkFork(new FkMethods(Arrays.asList(methods), take))
        );
    }
}
| dalifreire/takes | src/main/java/org/takes/facets/fork/TkMethods.java | Java | mit | 1,835 |
<?php
declare(strict_types=1);
namespace GrumPHPTest\Unit\Task;
use GrumPHP\Task\CloverCoverage;
use GrumPHP\Task\Context\GitPreCommitContext;
use GrumPHP\Task\Context\RunContext;
use GrumPHP\Task\TaskInterface;
use GrumPHP\Test\Task\AbstractTaskTestCase;
use GrumPHP\Util\Filesystem;
use Prophecy\Argument;
use Prophecy\Prophecy\ObjectProphecy;
class CloverCoverageTest extends AbstractTaskTestCase
{
    /**
     * Prophecy double for the filesystem the task reads the clover file from.
     *
     * @var Filesystem|ObjectProphecy
     */
    private $filesystem;

    // Builds the task under test with a prophesized filesystem so the tests
    // below can stub file existence and contents without touching disk.
    protected function provideTask(): TaskInterface
    {
        $this->filesystem = $this->prophesize(Filesystem::class);
        return new CloverCoverage(
            $this->filesystem->reveal()
        );
    }

    // Verifies option defaults: only 'clover_file' is required; 'level'
    // defaults to 100 (percent).
    public function provideConfigurableOptions(): iterable
    {
        yield 'defaults' => [
            [
                'clover_file' => 'coverage.xml',
            ],
            [
                'level' => 100,
                'clover_file' => 'coverage.xml',
            ]
        ];
    }

    // The task runs for full runs and git pre-commit hooks, but not in any
    // other context.
    public function provideRunContexts(): iterable
    {
        yield 'run-context' => [
            true,
            $this->mockContext(RunContext::class)
        ];
        yield 'pre-commit-context' => [
            true,
            $this->mockContext(GitPreCommitContext::class)
        ];
        yield 'other' => [
            false,
            $this->mockContext()
        ];
    }

    // Failure scenarios: missing input file, a non-positive level option, and
    // actual coverage below the configured threshold.
    public function provideFailsOnStuff(): iterable
    {
        yield 'fileDoesntExist' => [
            [
                'clover_file' => 'coverage.xml',
            ],
            $this->mockContext(RunContext::class, ['coverage.xml']),
            function () {
                $this->filesystem->exists('coverage.xml')->willReturn(false);
            },
            'Invalid input file provided'
        ];
        yield 'level0' => [
            [
                'clover_file' => 'coverage.xml',
                'level' => 0,
            ],
            $this->mockContext(RunContext::class, ['coverage.xml']),
            function () {
                $this->filesystem->exists('coverage.xml')->willReturn(true);
            },
            'An integer checked percentage must be given as second parameter'
        ];
        yield 'levelNotReached' => [
            [
                'clover_file' => 'coverage.xml',
                'level' => 100,
            ],
            $this->mockContext(RunContext::class, ['coverage.xml']),
            function () {
                $this->filesystem->exists('coverage.xml')->willReturn(true);
                // Argument::which matches any SplFileInfo whose getBasename()
                // returns 'coverage.xml' (Prophecy token semantics).
                $this->filesystem->readFromFileInfo(Argument::which('getBasename', 'coverage.xml'))->willReturn(
                    file_get_contents(TEST_BASE_PATH.'/fixtures/clover_coverage/60-percent-coverage.xml')
                );
            },
            'Code coverage is 60%, which is below the accepted 100%'
        ];
    }

    // Passing scenario: fixture reports 60% coverage, which meets a 50% level.
    public function providePassesOnStuff(): iterable
    {
        yield 'levelReached' => [
            [
                'clover_file' => 'coverage.xml',
                'level' => 50,
            ],
            $this->mockContext(RunContext::class, ['coverage.xml']),
            function () {
                $this->filesystem->exists('coverage.xml')->willReturn(true);
                $this->filesystem->readFromFileInfo(Argument::which('getBasename', 'coverage.xml'))->willReturn(
                    file_get_contents(TEST_BASE_PATH.'/fixtures/clover_coverage/60-percent-coverage.xml')
                );
            },
        ];
    }

    // Skip scenario: a clover file with zero metric elements means there is
    // nothing to measure, so the task is skipped rather than failed.
    public function provideSkipsOnStuff(): iterable
    {
        yield 'noMetricElements' => [
            [
                'clover_file' => 'coverage.xml',
                'level' => 50,
            ],
            $this->mockContext(RunContext::class, ['coverage.xml']),
            function () {
                $this->filesystem->exists('coverage.xml')->willReturn(true);
                $this->filesystem->readFromFileInfo(Argument::which('getBasename', 'coverage.xml'))->willReturn(
                    file_get_contents(TEST_BASE_PATH.'/fixtures/clover_coverage/0-elements.xml')
                );
            }
        ];
    }
}
| veewee/grumphp | test/Unit/Task/CloverCoverageTest.php | PHP | mit | 4,169 |
<div>
<div id="dashboard-main">
<!-- <form id="create_note" class="form-horizontal" method="post" action="<?= site_url('api/create_note') ?>">
<div class="input-append">
<input tabindex="1" type="text" name="title" placeholder="Note Title" />
<input tabindex="3" type="submit" class="btn btn-success" value="Create" />
</div>
<div class="clearfix"></div>
<textarea tabindex="2" name="content"></textarea>
</form>-->
<div id="list_user">
<span class="ajax-loader-gray"></span>
</div>
<div id="category-actions">
<div id="category-button"><a style="outline: medium none;" hidefocus="true" href="<?php echo site_url('customer/'); ?>"><img src="<?php echo base_url() ?>/public/img/images/customers-bt-w.png" style=" width:40px; height:40px; margin-top:10px" alt="View All Loads"></a></div>
<div class="loads-title" id="category-title" style="height: 32px;padding-top: 15px;"><h2 style="font-weight: 600;letter-spacing: 1px;padding-right: 5px;">CUSTOMERS</h2></div>
<div id="category-button"><a style="outline: medium none;" hidefocus="true" href="<?php echo site_url('customer/'); ?>"><img src="<?php echo base_url() ?>/public/img/images/loads-list-bt-45w.png" width="45" height="70" alt="View All Loads"></a></div>
<?php
if (in_array("customer/add", $roles)) {
?>
<div id="category-button"><a style="outline: medium none;" hidefocus="true" href="<?php echo site_url('customer/add'); ?>"><img src="<?php echo base_url() ?>/public/img/images/loads-add-bt-45w.png" width="45" height="70" alt="Add a Load"></a></div>
<?php } ?>
<div id="category-button"></div>
<div id="category-search" class="search-customer"></div>
<div id="category-search" class="search-carrier"></div>
<div id="category-search" class="search-loads"></div>
</div>
<div class="table-responsive">
<table id="list_load" class="table table-hover table-bordered table-striped">
<thead>
<tr style="background-color: #EBEBEB">
<th>#</th>
<th>Name</th>
<th>Phone</th>
<th>Email</th>
<th>Address</th>
<th>City</th>
<th>State</th>
<th>Country</th>
<?php echo in_array('customer/edit', $roles) || in_array('customer/trash', $roles) ? '<th>Actions</th>' : ''; ?>
</tr>
</thead>
<tbody>
<?php
// Render one table row per customer. $roles (current user's permissions)
// gates the Edit/Trash action links, mirroring the header logic above.
// NOTE(review): row values are echoed without htmlspecialchars(); if any
// customer field can contain user-supplied markup this is an XSS risk —
// confirm upstream sanitization before relying on it.
$i = 1;
foreach ($customers as $customer => $row) {
    // Map the numeric country code to a display label.
    $country = '';
    switch ($row['country']) {
        case 1:
            $country = 'USA';
            break;
        case 2:
            $country = 'Canada';
            break;
        default:
            // Unknown/unset country code: leave the cell empty.
            // (Previously this branch echoed leftover sample text —
            // "Your favorite color is..." — straight into the markup.)
            break;
    }
    echo '<tr id="customer_' . $row['idts_customer'] . '">';
    echo '<td>' . $i++ . '</td>';
    echo '<td>' . $row['name'] . '</td>';
    echo '<td>' . $row['phone'] . '</td>';
    echo '<td>' . $row['email'] . '</td>';
    echo '<td>' . $row['address'] . '</td>';
    echo '<td>' . $row['city'] . '</td>';
    echo '<td>' . $row['state'] . '</td>';
    echo '<td>' . $country . '</td>';
    echo in_array('customer/edit', $roles) || in_array('customer/trash', $roles) ? '<td>' : '';
    echo in_array('customer/edit', $roles) ? '<a href="customer/edit/' . $row['idts_customer'] . '">Edit</a>' : '';
    echo in_array('customer/trash', $roles) ? ' <a id="' . $row['idts_customer'] . '" class="trash">Trash</a>' : '';
    echo in_array('customer/edit', $roles) || in_array('customer/trash', $roles) ? '</td>' : '';
    echo '</tr>';
}
?>
</tbody>
</table>
</div>
<!-- Load view dialog -->
<div class="modal fade" id="load_view_dialog" tabindex="-1" role="dialog" aria-labelledby="myModalLabel">
<div class="modal-dialog" role="document">
<div class="modal-content">
<div class="modal-header">
<button type="button" class="close" data-dismiss="modal" aria-label="Close"><span aria-hidden="true">×</span></button>
<h4 class="modal-title" id="myModalLabel">Load Details</h4>
</div>
<div class="modal-body">
<fieldset>
<!-- Form Name -->
<legend>Load Details</legend>
<div id="load_detail"></div>
</fieldset>
</div>
<div class="modal-footer">
<button type="button" class="btn btn-default" data-dismiss="modal">Close</button>
</div>
</div>
</div>
</div>
</div>
</div>
<!-- Hidden content -->
<div id="popover_content" style="display: none">
<ul>
<li><a data-id="4" class="editLink" title="Edit this Load" href=""><i class="icon-pencil"></i> Edit</a></li>
<li><a data-id="4" class="editLink" title="Send message to driver" href=""><i class="icon-user"></i> Send Message</a></li>
<li></li>
</ul>
</div>
<style>
.popover-content ul{
margin: 0 0 10px 5px;
}
.popover-content ul li{
list-style: none;
}
</style>
<script>
$(function () {
$('#list_load tbody tr').on('click', function (event) {
$(this).addClass('highlight').siblings().removeClass('highlight');
});
$('body').on('click', '.po', function (evt) {
evt.preventDefault();
var load_id = $(this).data('load_id');
var editHtml = '<ul><li data-load_edit="' + load_id + '">Edit</li></ul>';
// $('#abc').append(editHtml);
var popover = $(this).attr('id');
$('#popover_content ul li a.editLink').attr('href', 'load/update/' + popover)
$(this).popover({
"trigger": "manual",
"html": "true",
"title": 'Load Options # ' + $(this).html() + '<span style="margin-left:15px;" class="pull-right"><a href="#" onclick="$("#' + popover + '").popover("toggle");" class="text-danger popover-close" data-bypass="true" title="Close"><i class="fa fa-close"></i>X</a></span>',
"content": $('#popover_content').html()
// "content":'<ul><li><a data-id="4" title="Edit this Load" href="load/update/'+popover+'"><i class="icon-pencil"></i> Edit</a> </li></ul>'
});
$(this).popover('toggle');
});
$('body').on('click', '.trash', function (evt) {
evt.preventDefault();
var customer = $(this);
var id = customer.attr('id');
var r = confirm("Confirm trashing customer?");
if (r == true) {
$.ajax({
type: "POST",
url: 'customer/change_status/' + id + '/' + 0,
async: true,
dataType: "json",
beforeSend: function () {
$('#result_destination').html('Loading...');
$('#result_destination').show();
},
success: function (data) {
if (data.status == 1) {
$('#customer_' + id).remove();
console.log('customer deleted');
} else {
alert('User could not be trashed. Please contact administrator.');
}
}
});
}
});
});
</script> | digimark1/trackngo-dash | application/views/customer/customer_view.php | PHP | mit | 8,679 |
namespace PlugInDemo
{
    /// <summary>
    /// Shared constants for the PlugInDemo module.
    /// </summary>
    public class PlugInDemoConsts
    {
        /// <summary>
        /// Name of the localization source used when registering/resolving
        /// localized texts for this module.
        /// </summary>
        public const string LocalizationSourceName = "PlugInDemo";
    }
}
/*jshint laxbreak:true */
var assert = require('assert');
var metadata = require('./index');
describe('metadata.cmd()', function() {
  // Baseline: identify -format string without the exif wildcard.
  it('returns command without exif data', function() {
    var cmd = 'identify -format "name=\nsize=%[size]\nformat=%m\n'
      + 'colorspace=%[colorspace]\nheight=%[height]\nwidth=%[width]\n'
      + 'orientation=%[orientation]\n" /foo/bar/baz';
    assert.equal(metadata.cmd('/foo/bar/baz'), cmd);
  });

  // With {exif: true} the format string gains the %[exif:*] wildcard.
  it('returns command with exif data', function() {
    var cmd = 'identify -format "name=\nsize=%[size]\nformat=%m\n'
      + 'colorspace=%[colorspace]\nheight=%[height]\nwidth=%[width]\n'
      + 'orientation=%[orientation]\n%[exif:*]" /foo/bar/baz';
    assert.equal(metadata.cmd('/foo/bar/baz', {exif: true}), cmd);
  });
});
describe('metadata.parse()', function() {
  var path = '/foo/bar/baz.jpg';

  it('returns object for single value', function() {
    assert.deepEqual(metadata.parse(path, 'foo=bar'), {
      path: path,
      foo: 'bar'
    });
  });

  it('returns object for metadata string', function() {
    assert.deepEqual(metadata.parse(path, 'foo=bar\nbar=foo'), {
      path: path,
      foo: 'bar',
      bar: 'foo'
    });
  });

  it('skips empty lines', function() {
    assert.deepEqual(metadata.parse(path, 'foo=bar\n\nbar=foo\n\n'), {
      path: path,
      foo: 'bar',
      bar: 'foo'
    });
  });

  // Malformed unit suffix ("MBB") should still be parsed as megabytes:
  // 4.296 * 1024 * 1024 ≈ 4504682 bytes.
  it('returns correct size for bogus value', function() {
    assert.deepEqual(metadata.parse(path, 'size=4.296MBB'), {
      path: path,
      size: 4504682
    });
  });

  it('returns size in bytes', function() {
    assert.deepEqual(metadata.parse(path, 'size=20MB'), {
      path: path,
      size: 20 * 1024 * 1024
    });
  });

  it('returns RGB for sRGB colorspace', function() {
    assert.deepEqual(metadata.parse(path, 'colorspace=sRGB'), {
      path: path,
      colorspace: 'RGB'
    });
  });

  it('returns "" for Undefined orientation', function() {
    assert.deepEqual(metadata.parse(path, 'orientation=Undefined'), {
      path: path,
      orientation: ''
    });
  });

  // Orientations 1-4 (row-major) keep width/height; orientations 5-8
  // (transposed) swap them when autoOrient is requested.
  // (Fixed test title typo: "widt" -> "width".)
  it('returns height and width for auto-orient', function() {
    var meta = 'width=100\nheight=150\norientation=';
    var opts = {autoOrient: true};
    var orientation = [
      'TopLeft', 'TopRight', 'BottomRight', 'BottomLeft',
      'LeftTop', 'RightTop', 'RightBottom', 'LeftBottom'
    ];

    for (var i = 0; i < 4; i++) {
      assert.deepEqual(metadata.parse(path, meta + orientation[i], opts), {
        height: 150,
        width: 100,
        path: path,
        orientation: orientation[i]
      });
    }

    for (var j = 4; j < 8; j++) {
      assert.deepEqual(metadata.parse(path, meta + orientation[j], opts), {
        height: 100,
        width: 150,
        path: path,
        orientation: orientation[j]
      });
    }
  });
});
describe('metadata()', function() {
  // Integration tests: these shell out to ImageMagick against images in
  // ./assets, so they require identify(1) and the fixtures on disk.
  it('returns metadata for image', function(done) {
    metadata('./assets/image.jpg', { exif: false }, function(err, data) {
      assert.ifError(err);
      assert.equal(data.path, './assets/image.jpg');
      assert.equal(data.name, '');
      assert.equal(data.size, 4504682);
      assert.equal(data.format, 'JPEG');
      assert.equal(data.colorspace, 'RGB');
      assert.equal(data.height, 3456);
      assert.equal(data.width, 5184);
      assert.equal(data.orientation, 'TopLeft');
      // exif disabled -> no exif key at all
      assert.equal(typeof data.exif, 'undefined');
      done();
    });
  });

  it('returns metadata for image with exif data', function(done) {
    metadata('./assets/image.jpg', { exif: true }, function(err, data) {
      assert.ifError(err);
      assert.equal(data.path, './assets/image.jpg');
      assert.equal(data.name, '');
      assert.equal(data.size, 4504682);
      assert.equal(data.format, 'JPEG');
      assert.equal(data.colorspace, 'RGB');
      assert.equal(data.height, 3456);
      assert.equal(data.width, 5184);
      assert.equal(data.orientation, 'TopLeft');
      // exif enabled -> parsed exif tag map with raw rational values
      assert.equal(typeof data.exif, 'object');
      assert.equal(Object.keys(data.exif).length, 36);
      assert.equal(data.exif.ApertureValue, '37/8');
      done();
    });
  });

  // orient.jpg is stored rotated; with autoOrient the reported dimensions
  // are swapped to the display orientation.
  it('returns correct height and width for auto-orient', function(done) {
    metadata('./assets/orient.jpg', { autoOrient: true }, function(err, data) {
      assert.ifError(err);
      assert.equal(data.height, 3264);
      assert.equal(data.width, 2448);
      done();
    });
  });
});
| Turistforeningen/node-im-metadata | test.js | JavaScript | mit | 4,447 |
<?php
use miloschuman\highcharts\Highcharts;
use yii\web\JsExpression;
use yii\data\SqlDataProvider;
use yii\grid\GridView;
use app\models\Cashbook;
$this->title = 'Economizzer';
$this->title = Yii::t('app', 'Overview');
?>
<div class="dashboard-index">
<div class="row">
<div class="col-md-6"><?php echo $this->render('_menu'); ?></div>
<div class="col-md-6"></div>
</div>
<hr/>
<div class="row">
<div class="col-md-6">
<div class="panel panel-default">
<div class="panel-heading"><strong><?php echo Yii::t('app', 'Performance');?></strong></div>
<div class="panel-body" style="height: 250px;">
<?php
/* testing get currency by locale */
//$sum = 4362;
//echo Yii::$app->formatter->asCurrency(str_replace(',', '', $sum));
$balance = ((round((int)$currentmonth_revenue)-abs(round((int)$currentmonth_expense))) >=0 ? (round((int)$currentmonth_revenue)-abs(round((int)$currentmonth_expense))) : 0);
echo Highcharts::widget([
'options' => [
'credits' => ['enabled' => false],
'chart'=> ['height'=> 200,],
'title' => [
'text' => Yii::t('app', 'Expense'),
'align' => 'center',
'verticalAlign' => 'middle',
'style' => [
'fontSize'=> '12px',
'color' => '#e74c3c',
]
],
'colors'=> ['#18bc9c','#e74c3c'],
'tooltip'=> ['pointFormat'=> Yii::t('app', 'Percentage').': <b>{point.percentage:.1f}%</b>'],
'plotOptions'=> [
'pie'=> [
'allowPointSelect'=> true,
'cursor'=> 'pointer',
'size'=> '100%',
'innerSize'=> '60%',
'dataLabels'=> ['enabled'=> false,],
'center'=> ['50%', '55%'],
]
],
'series'=> [[
'type'=> 'pie',
'name'=> 'Valor',
'data'=> [
[Yii::t('app', 'Balance'), 'y'=> $balance],
[Yii::t('app', 'Expense'), 'y'=> abs(round((int)$currentmonth_expense)), ['sliced'=> true]],
]
]]
]
]);
?>
</div></div></div>
<div class="col-md-6">
<div class="panel panel-default">
<div class="panel-heading"><strong><?php echo Yii::t('app', 'Evolution');?></strong></div>
<div class="panel-body" style="height: 250px;">
<?php
// Monthly balance badge: positive when (current revenue + carried-over
// previous-month balance) covers current expenses, negative otherwise.
// Casts to int discard cents before rounding; abs() normalizes expenses
// stored as negative amounts.
// NOTE(review): the positive branch wraps the label in <h3> while the
// negative branch uses <span> — looks like an unintended markup
// inconsistency; confirm which is the desired element.
if(round((int)($currentmonth_revenue+($previousmonth_revenue - abs((int)$previousmonth_expense)))) >= abs(round((int)$currentmonth_expense)))
{
$overbalance = "<div>". Yii::t('app', 'Monthly balance'). "<h3 class=\"label label-success pull-right\">".Yii::t('app', 'Positive')."</h3></div>";
}else{
$overbalance = "<div>". Yii::t('app', 'Monthly balance'). "<span class=\"label label-danger pull-right\">".Yii::t('app', 'Negative')."</span></div>";
}
echo $overbalance;
?>
<table class="table table-bordered text-center">
<thead>
<tr>
<th class="text-center"><i class="fa fa-line-chart"></i></th>
<th class="text-center"><?php echo Yii::t('app', 'Previous Month');?></th>
<th class="text-center"><?php echo Yii::t('app', 'Current Month');?></th>
</tr>
</thead>
<tbody>
<tr class="text-success">
<td><?php echo Yii::t('app', 'Revenue');?></td>
<td><?php echo Yii::t('app', '$')." ".number_format((float)$previousmonth_revenue,2);?></td>
<td><?php echo Yii::t('app', '$')." ".number_format((float)($currentmonth_revenue+($previousmonth_revenue - abs((float)$previousmonth_expense))),2);?></td>
</tr>
<tr class="text-danger">
<td><?php echo Yii::t('app', 'Expense');?></td>
<td><?php echo Yii::t('app', '$')." ".number_format(abs((float)$previousmonth_expense),2);?></td>
<td><?php echo Yii::t('app', '$')." ".number_format(abs((float)$currentmonth_expense),2);?></td>
</tr>
<tr class="text-primary">
<td><?php echo Yii::t('app', 'Balance');?></td>
<td><?php echo Yii::t('app', '$')." ".number_format(((float)$previousmonth_revenue - abs((float)$previousmonth_expense)),2);?></td>
<td><?php echo Yii::t('app', '$')." ".number_format(((float)$currentmonth_revenue+($previousmonth_revenue - abs((float)$previousmonth_expense)) - abs((float)$currentmonth_expense)),2);?></td>
</tr>
</tbody>
</table>
</div>
</div>
</div>
</div>
<div class="row">
<div class="col-md-6">
<div class="panel panel-default">
<div class="panel-heading"><strong><?php echo Yii::t('app', 'Expenses by Category');?></strong></div>
<div class="panel-body">
<?php
echo Highcharts::widget([
'options' => [
'credits' => ['enabled' => false],
'title' => [
'text' => '',
],
'xAxis' => [
'categories' => $cat,
],
'yAxis' => [
'min' => 0,
'title' => '',
],
'series' => [
[
'type' => 'bar',
'colorByPoint'=> true,
'name' => Yii::t('app', 'Category'),
'data' => $value,
'colors' => $color,
],
],
]
]);
?>
</div>
</div>
</div>
<div class="col-md-6">
<div class="panel panel-default">
<div class="panel-heading"><strong><?php echo Yii::t('app', 'Expenses by Segment');?></strong></div>
<div class="panel-body">
<?php
echo Highcharts::widget([
'options' => [
'credits' => ['enabled' => false],
'title' => [
'text' => '',
],
'xAxis' => [
'categories' => $seg,
],
'yAxis' => [
'min' => 0,
'title' => '',
],
'series' => [
[
'type' => 'column',
'colorByPoint'=> true,
'name' => Yii::t('app', 'Segment'),
'data' => $total,
'colors' => $colorseg,
],
],
]
]);
?>
</div>
</div>
</div>
</div>
</div>
| squatteur/economizzer | views/dashboard/overview.php | PHP | mit | 7,769 |
// ***********************************************************************
// Copyright (c) 2009 Charlie Poole, Rob Prouse
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// ***********************************************************************
using System;
using System.Collections.Generic;
using NUnit.Framework.Constraints;
namespace NUnit.Framework.Syntax
{
    [TestFixture]
    public class ArbitraryConstraintMatching
    {
        // Two distinct custom constraints used to verify that Matches()
        // accepts arbitrary user-defined Constraint instances.
        Constraint custom = new CustomConstraint();
        Constraint another = new AnotherConstraint();

        [Test]
        public void CanMatchCustomConstraint()
        {
            IResolveConstraint constraint = new ConstraintExpression().Matches(custom);
            Assert.That(constraint.Resolve().ToString(), Is.EqualTo("<custom>"));
        }

        [Test]
        public void CanMatchCustomConstraintAfterPrefix()
        {
            // Matches() after a prefix operator (All) nests inside it.
            IResolveConstraint constraint = Is.All.Matches(custom);
            Assert.That(constraint.Resolve().ToString(), Is.EqualTo("<all <custom>>"));
        }

        [Test]
        public void CanMatchCustomConstraintsUnderAndOperator()
        {
            IResolveConstraint constraint = Is.All.Matches(custom).And.Matches(another);
            Assert.That(constraint.Resolve().ToString(), Is.EqualTo("<all <and <custom> <another>>>"));
        }

        [Test]
        public void CanMatchPredicate()
        {
            // A Predicate<T> is wrapped into a "<predicate>" constraint and
            // evaluated against the actual value (42 is even -> passes).
            IResolveConstraint constraint = new ConstraintExpression().Matches(new Predicate<int>(IsEven));
            Assert.That(constraint.Resolve().ToString(), Is.EqualTo("<predicate>"));
            Assert.That(42, constraint);
        }

        // Helper predicate: true for even numbers.
        bool IsEven(int num)
        {
            return (num & 1) == 0;
        }

        [Test]
        public void CanMatchLambda()
        {
            IResolveConstraint constraint = new ConstraintExpression().Matches<int>( (x) => (x & 1) == 0);
            Assert.That(constraint.Resolve().ToString(), Is.EqualTo("<predicate>"));
            Assert.That(42, constraint);
        }

        // Minimal custom constraint; ApplyTo is never invoked by the
        // ToString-based tests above, so it deliberately throws.
        class CustomConstraint : Constraint
        {
            public override ConstraintResult ApplyTo<TActual>(TActual actual)
            {
                throw new NotImplementedException();
            }
        }

        class AnotherConstraint : CustomConstraint
        {
        }

        [Test]
        public void ApplyMatchesToProperty()
        {
            var unit = new Unit();

            // All forms should pass
            Assert.That(unit, Has.Property("Items").With.Property("Count").EqualTo(5));
            Assert.That(unit, Has.Property("Items").With.Count.EqualTo(5));
            Assert.That(unit, Has.Property("Items").Property("Count").EqualTo(5));
            Assert.That(unit, Has.Property("Items").Count.EqualTo(5));
            // This is the one the bug refers to
            Assert.That(unit, Has.Property("Items").Matches(Has.Count.EqualTo(5)));
        }

        // Fixture type exposing an Items list of known Count for the
        // property-matching tests.
        private class Unit
        {
            public List<int> Items { get; private set; }

            public Unit()
            {
                Items = new List<int>(new int[] { 1, 2, 3, 4, 5 });
            }
        }
    }
| jadarnel27/nunit | src/NUnitFramework/tests/Syntax/ArbitraryConstraintMatching.cs | C# | mit | 4,211 |
module.exports = {
"sha": "6d1fd68d5d273f6c46113f5843731131ad226d64",
"name": "numenta/experiments",
"target_url": "https://travis-ci.org/numenta/experiments",
"description": "NuPIC Status: Travis CI build has not started.",
"state": "pending",
"branches": [],
"commit": {
"sha": "6d1fd68d5d273f6c46113f5843731131ad226d64",
"commit": {
"author": {
"name": "Matthew Taylor",
"email": "rhyolight@gmail.com",
"date": "2014-03-25T04:38:48Z"
},
"committer": {
"name": "Matthew Taylor",
"email": "rhyolight@gmail.com",
"date": "2014-03-25T04:38:48Z"
},
"message": "Update README.md",
"tree": {
"sha": "067e3d6dd8e046735031285b633fe7c20c4b2b27",
"url": "https://api.github.com/repos/numenta/experiments/git/trees/067e3d6dd8e046735031285b633fe7c20c4b2b27"
},
"url": "https://api.github.com/repos/numenta/experiments/git/commits/6d1fd68d5d273f6c46113f5843731131ad226d64",
"comment_count": 0
},
"url": "https://api.github.com/repos/numenta/experiments/commits/6d1fd68d5d273f6c46113f5843731131ad226d64",
"html_url": "https://github.com/numenta/experiments/commit/6d1fd68d5d273f6c46113f5843731131ad226d64",
"comments_url": "https://api.github.com/repos/numenta/experiments/commits/6d1fd68d5d273f6c46113f5843731131ad226d64/comments",
"author": {
"login": "rhyolight",
"id": 15566,
"avatar_url": "https://avatars.githubusercontent.com/u/15566?",
"gravatar_id": "92b95d73c678f23c6060e63bff3dbcbd",
"url": "https://api.github.com/users/rhyolight",
"html_url": "https://github.com/rhyolight",
"followers_url": "https://api.github.com/users/rhyolight/followers",
"following_url": "https://api.github.com/users/rhyolight/following{/other_user}",
"gists_url": "https://api.github.com/users/rhyolight/gists{/gist_id}",
"starred_url": "https://api.github.com/users/rhyolight/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/rhyolight/subscriptions",
"organizations_url": "https://api.github.com/users/rhyolight/orgs",
"repos_url": "https://api.github.com/users/rhyolight/repos",
"events_url": "https://api.github.com/users/rhyolight/events{/privacy}",
"received_events_url": "https://api.github.com/users/rhyolight/received_events",
"type": "User",
"site_admin": false
},
"committer": {
"login": "rhyolight",
"id": 15566,
"avatar_url": "https://avatars.githubusercontent.com/u/15566?",
"gravatar_id": "92b95d73c678f23c6060e63bff3dbcbd",
"url": "https://api.github.com/users/rhyolight",
"html_url": "https://github.com/rhyolight",
"followers_url": "https://api.github.com/users/rhyolight/followers",
"following_url": "https://api.github.com/users/rhyolight/following{/other_user}",
"gists_url": "https://api.github.com/users/rhyolight/gists{/gist_id}",
"starred_url": "https://api.github.com/users/rhyolight/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/rhyolight/subscriptions",
"organizations_url": "https://api.github.com/users/rhyolight/orgs",
"repos_url": "https://api.github.com/users/rhyolight/repos",
"events_url": "https://api.github.com/users/rhyolight/events{/privacy}",
"received_events_url": "https://api.github.com/users/rhyolight/received_events",
"type": "User",
"site_admin": false
},
"parents": [{
"sha": "9594d18d57ca80cdb66aacce2475eebca61c1593",
"url": "https://api.github.com/repos/numenta/experiments/commits/9594d18d57ca80cdb66aacce2475eebca61c1593",
"html_url": "https://github.com/numenta/experiments/commit/9594d18d57ca80cdb66aacce2475eebca61c1593"
}]
},
"repository": {
"id": 10708772,
"name": "experiments",
"full_name": "numenta/experiments",
"owner": {
"login": "numenta",
"id": 1039191,
"avatar_url": "https://avatars.githubusercontent.com/u/1039191?",
"gravatar_id": "faac04630eba5ec1aeda8bcbca3ff018",
"url": "https://api.github.com/users/numenta",
"html_url": "https://github.com/numenta",
"followers_url": "https://api.github.com/users/numenta/followers",
"following_url": "https://api.github.com/users/numenta/following{/other_user}",
"gists_url": "https://api.github.com/users/numenta/gists{/gist_id}",
"starred_url": "https://api.github.com/users/numenta/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/numenta/subscriptions",
"organizations_url": "https://api.github.com/users/numenta/orgs",
"repos_url": "https://api.github.com/users/numenta/repos",
"events_url": "https://api.github.com/users/numenta/events{/privacy}",
"received_events_url": "https://api.github.com/users/numenta/received_events",
"type": "Organization",
"site_admin": false
},
"private": false,
"html_url": "https://github.com/numenta/experiments",
"description": "A junk repo for experimenting with git flows and toolins",
"fork": false,
"url": "https://api.github.com/repos/numenta/experiments",
"forks_url": "https://api.github.com/repos/numenta/experiments/forks",
"keys_url": "https://api.github.com/repos/numenta/experiments/keys{/key_id}",
"collaborators_url": "https://api.github.com/repos/numenta/experiments/collaborators{/collaborator}",
"teams_url": "https://api.github.com/repos/numenta/experiments/teams",
"hooks_url": "https://api.github.com/repos/numenta/experiments/hooks",
"issue_events_url": "https://api.github.com/repos/numenta/experiments/issues/events{/number}",
"events_url": "https://api.github.com/repos/numenta/experiments/events",
"assignees_url": "https://api.github.com/repos/numenta/experiments/assignees{/user}",
"branches_url": "https://api.github.com/repos/numenta/experiments/branches{/branch}",
"tags_url": "https://api.github.com/repos/numenta/experiments/tags",
"blobs_url": "https://api.github.com/repos/numenta/experiments/git/blobs{/sha}",
"git_tags_url": "https://api.github.com/repos/numenta/experiments/git/tags{/sha}",
"git_refs_url": "https://api.github.com/repos/numenta/experiments/git/refs{/sha}",
"trees_url": "https://api.github.com/repos/numenta/experiments/git/trees{/sha}",
"statuses_url": "https://api.github.com/repos/numenta/experiments/statuses/{sha}",
"languages_url": "https://api.github.com/repos/numenta/experiments/languages",
"stargazers_url": "https://api.github.com/repos/numenta/experiments/stargazers",
"contributors_url": "https://api.github.com/repos/numenta/experiments/contributors",
"subscribers_url": "https://api.github.com/repos/numenta/experiments/subscribers",
"subscription_url": "https://api.github.com/repos/numenta/experiments/subscription",
"commits_url": "https://api.github.com/repos/numenta/experiments/commits{/sha}",
"git_commits_url": "https://api.github.com/repos/numenta/experiments/git/commits{/sha}",
"comments_url": "https://api.github.com/repos/numenta/experiments/comments{/number}",
"issue_comment_url": "https://api.github.com/repos/numenta/experiments/issues/comments/{number}",
"contents_url": "https://api.github.com/repos/numenta/experiments/contents/{+path}",
"compare_url": "https://api.github.com/repos/numenta/experiments/compare/{base}...{head}",
"merges_url": "https://api.github.com/repos/numenta/experiments/merges",
"archive_url": "https://api.github.com/repos/numenta/experiments/{archive_format}{/ref}",
"downloads_url": "https://api.github.com/repos/numenta/experiments/downloads",
"issues_url": "https://api.github.com/repos/numenta/experiments/issues{/number}",
"pulls_url": "https://api.github.com/repos/numenta/experiments/pulls{/number}",
"milestones_url": "https://api.github.com/repos/numenta/experiments/milestones{/number}",
"notifications_url": "https://api.github.com/repos/numenta/experiments/notifications{?since,all,participating}",
"labels_url": "https://api.github.com/repos/numenta/experiments/labels{/name}",
"releases_url": "https://api.github.com/repos/numenta/experiments/releases{/id}",
"created_at": "2013-06-15T16:23:06Z",
"updated_at": "2014-03-25T04:30:09Z",
"pushed_at": "2014-03-25T04:30:07Z",
"git_url": "git://github.com/numenta/experiments.git",
"ssh_url": "git@github.com:numenta/experiments.git",
"clone_url": "https://github.com/numenta/experiments.git",
"svn_url": "https://github.com/numenta/experiments",
"homepage": null,
"size": 344,
"stargazers_count": 2,
"watchers_count": 2,
"language": null,
"has_issues": true,
"has_downloads": true,
"has_wiki": true,
"forks_count": 4,
"mirror_url": null,
"open_issues_count": 7,
"forks": 4,
"open_issues": 7,
"watchers": 2,
"default_branch": "master",
"master_branch": "master"
},
"sender": {
"login": "numenta-ci",
"id": 4650657,
"avatar_url": "https://avatars.githubusercontent.com/u/4650657?",
"gravatar_id": "00d730d6342f80cdad84a37a176d0c49",
"url": "https://api.github.com/users/numenta-ci",
"html_url": "https://github.com/numenta-ci",
"followers_url": "https://api.github.com/users/numenta-ci/followers",
"following_url": "https://api.github.com/users/numenta-ci/following{/other_user}",
"gists_url": "https://api.github.com/users/numenta-ci/gists{/gist_id}",
"starred_url": "https://api.github.com/users/numenta-ci/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/numenta-ci/subscriptions",
"organizations_url": "https://api.github.com/users/numenta-ci/orgs",
"repos_url": "https://api.github.com/users/numenta-ci/repos",
"events_url": "https://api.github.com/users/numenta-ci/events{/privacy}",
"received_events_url": "https://api.github.com/users/numenta-ci/received_events",
"type": "User",
"site_admin": false
}
}; | brev/nupic.tools | test/github_payloads/status_nupic_pending.js | JavaScript | mit | 10,928 |
<?php
declare(strict_types=1);
/**
* This file is part of the Zephir.
*
* (c) Phalcon Team <team@zephir-lang.com>
*
* For the full copyright and license information, please view
* the LICENSE file that was distributed with this source code.
*/
namespace Extension;
use PHPUnit\Framework\TestCase;
use Stub\Oo\DynamicProp;
final class DynamicPropTest extends TestCase
{
    // Exercises Zephir's dynamic property assignment: each setProperty*
    // helper writes a differently-typed value to the property whose name is
    // passed in (or, for the setExisting*/setNonExisting* helpers, to a
    // hard-coded property name inside the extension class).
    public function testSetProperty(): void
    {
        $class = new DynamicProp();
        // Property starts out unset/null.
        $this->assertNull($class->test);

        // Literal-typed setters.
        $class->setPropertyInt('test');
        $this->assertSame(10, $class->test);

        $class->setPropertyBoolTrue('test');
        $this->assertTrue($class->test);

        $class->setPropertyBoolFalse('test');
        $this->assertFalse($class->test);

        $class->setPropertyString('test');
        $this->assertSame('string', $class->test);

        // Char setters compare against ASCII code points.
        $class->setPropertyChar('test');
        $this->assertSame(\ord('A'), $class->test);

        $class->setPropertyUChar('test');
        $this->assertSame(\ord('B') - 1, $class->test);

        $class->setPropertyNull('test');
        $this->assertNull($class->test);

        // Variable-typed setters.
        $class->setPropertyVariableInt('test');
        $this->assertSame(10, $class->test);

        $class->setPropertyVariableString('test');
        $this->assertSame('test', $class->test);

        $class->setPropertyVariableBoolTrue('test');
        $this->assertTrue($class->test);

        $class->setPropertyVariableBoolFalse('test');
        $this->assertFalse($class->test);

        // Dynamic name targeting a different property.
        $class->setPropertyInt('property');
        $this->assertSame(10, $class->property);

        // Setters with hard-coded target property names (existing vs not).
        $class->setExistingStringProperty();
        $this->assertSame('works', $class->test);

        $class->setExistingStringPropertyString('test');
        $this->assertSame('test', $class->eproperty);

        $class->setNonExistingStringProperty();
        $this->assertSame('works', $class->property);
    }
}
| phalcon/zephir | tests/Extension/DynamicPropTest.php | PHP | mit | 1,941 |
// Copyright (c) Microsoft Corporation. All Rights Reserved. See License.txt in the project root for license information.
using Microsoft.VisualStudio.Shell.Interop;
using System;
using System.Runtime.InteropServices;
using Microsoft.VisualStudio.OLE.Interop;
using Microsoft.VisualStudio.Shell;
using System.Diagnostics;
using System.Globalization;
using System.Collections;
using System.IO;
using System.Collections.Generic;
using System.Linq;
using EnvDTE;
using System.Diagnostics.CodeAnalysis;
/* This file provides base functionality for IVsCfgProvider2.
   Instead of using the IVsProjectCfgEventsHelper object we have our own little sink and call our own helper methods
   similar to the interface. But there is no real benefit in inheriting from the interface in the first place.
   Using the helper object seems to be:
   a) undocumented
   b) not really wise in the managed world
*/
namespace Microsoft.VisualStudio.FSharp.ProjectSystem
{
    /// <summary>
    /// Manages the set of build configurations (Configuration x Platform pairs) for a
    /// project: enumerating, creating, cloning, renaming and deleting them by editing
    /// the conditional property groups of the underlying MSBuild project file, and
    /// raising IVsCfgProviderEvents notifications to registered sinks.
    /// </summary>
    [CLSCompliant(false)]
    [ComVisible(true)]
    public class ConfigProvider : IVsCfgProvider2, IVsProjectCfgProvider, IVsExtensibleObject
    {
        private ProjectNode project;
        // Registered IVsCfgProviderEvents sinks; the EventSinkCollection index is the cookie.
        private EventSinkCollection cfgEventSinks = new EventSinkCollection();
        private List<KeyValuePair<KeyValuePair<string, string>, string>> newCfgProps = new List<KeyValuePair<KeyValuePair<string, string>, string>>();
        // Cache of ProjectConfig objects keyed by canonical "Configuration|Platform" name.
        private Dictionary<ConfigCanonicalName, ProjectConfig> configurationsList = new Dictionary<ConfigCanonicalName, ProjectConfig>();
        /// <summary>
        /// The associated project.
        /// </summary>
        public ProjectNode ProjectMgr
        {
            get
            {
                return this.project;
            }
        }
        /// <summary>
        /// If the project system wants to add custom properties to the property group then
        /// they provide us with this data.
        /// Returns/sets the [(<propName, propCondition>) <propValue>] collection
        /// </summary>
        public virtual List<KeyValuePair<KeyValuePair<string, string>, string>> NewConfigProperties
        {
            get
            {
                return newCfgProps;
            }
            set
            {
                newCfgProps = value;
            }
        }
        internal ConfigProvider(ProjectNode manager)
        {
            this.project = manager;
        }
        /// <summary>
        /// Creates new Project Configuration objects based on the configuration name.
        /// </summary>
        /// <param name="canonicalName">The name of the configuration</param>
        /// <returns>An instance of a ProjectConfig object.</returns>
        internal ProjectConfig GetProjectConfiguration(ConfigCanonicalName canonicalName)
        {
            // if we already created it, return the cached one
            if (configurationsList.ContainsKey(canonicalName))
            {
                return configurationsList[canonicalName];
            }
            ProjectConfig requestedConfiguration = CreateProjectConfiguration(canonicalName);
            configurationsList.Add(canonicalName, requestedConfiguration);
            return requestedConfiguration;
        }
        // Factory hook so derived providers can supply a specialized ProjectConfig.
        internal virtual ProjectConfig CreateProjectConfiguration(ConfigCanonicalName canonicalName)
        {
            return new ProjectConfig(this.project, canonicalName);
        }
        /// <summary>
        /// Provides access to the IVsProjectCfg interface implemented on a project's configuration object.
        /// </summary>
        /// <param name="projectCfgCanonicalName">The canonical name of the configuration to access.</param>
        /// <param name="projectCfg">The IVsProjectCfg interface of the configuration identified by szProjectCfgCanonicalName.</param>
        /// <returns>If the method succeeds, it returns S_OK. If it fails, it returns an error code. </returns>
        public virtual int OpenProjectCfg(string projectCfgCanonicalName, out IVsProjectCfg projectCfg)
        {
            if (projectCfgCanonicalName == null)
            {
                throw new ArgumentNullException("projectCfgCanonicalName");
            }
            projectCfg = null;
            // Be robust in release
            // NOTE(review): unreachable — projectCfgCanonicalName was already null-checked
            // (and thrown on) above, so this branch can never be taken.
            if (projectCfgCanonicalName == null)
            {
                return VSConstants.E_INVALIDARG;
            }
            Debug.Assert(this.project != null && this.project.BuildProject != null);
            string[] configs = GetPropertiesConditionedOn(ProjectFileConstants.Configuration);
            string[] platforms = GetPropertiesConditionedOn(ProjectFileConstants.Platform);
            var configCanonicalName = new ConfigCanonicalName(projectCfgCanonicalName);
            // Only hand out a configuration whose name/platform pair actually exists in the project file.
            foreach (string config in configs)
            {
                foreach (string platform in platforms)
                {
                    if (configCanonicalName == new ConfigCanonicalName(config, platform))
                    {
                        projectCfg = this.GetProjectConfiguration(configCanonicalName);
                        if (projectCfg != null)
                        {
                            return VSConstants.S_OK;
                        }
                        else
                        {
                            return VSConstants.E_FAIL;
                        }
                    }
                }
            }
            return VSConstants.E_INVALIDARG;
        }
        /// <summary>
        /// Checks whether or not this configuration provider uses independent configurations.
        /// </summary>
        /// <param name="usesIndependentConfigurations">true if independent configurations are used, false if they are not used. By default returns true.</param>
        /// <returns>If the method succeeds, it returns S_OK. If it fails, it returns an error code.</returns>
        public virtual int get_UsesIndependentConfigurations(out int usesIndependentConfigurations)
        {
            usesIndependentConfigurations = 1;
            return VSConstants.S_OK;
        }
        /// <summary>
        /// Copies an existing configuration name or creates a new one.
        /// </summary>
        /// <param name="name">The name of the new configuration.</param>
        /// <param name="cloneName">the name of the configuration to copy, or a null reference, indicating that AddCfgsOfCfgName should create a new configuration.</param>
        /// <param name="fPrivate">Flag indicating whether or not the new configuration is private. If fPrivate is set to true, the configuration is private. If set to false, the configuration is public. This flag can be ignored.</param>
        /// <returns>If the method succeeds, it returns S_OK. If it fails, it returns an error code. </returns>
        public virtual int AddCfgsOfCfgName(string name, string cloneName, int fPrivate)
        {
            // We need to QE/QS the project file
            if (!this.ProjectMgr.QueryEditProjectFile(false))
            {
                throw Marshal.GetExceptionForHR(VSConstants.OLE_E_PROMPTSAVECANCELLED);
            }
            // Get all configs
            this.project.BuildProject.ReevaluateIfNecessary();
            List<Microsoft.Build.Construction.ProjectPropertyGroupElement> configGroup = new List<Microsoft.Build.Construction.ProjectPropertyGroupElement>(this.project.BuildProject.Xml.PropertyGroups);
            // platform -> property group
            var configToClone = new Dictionary<string,Microsoft.Build.Construction.ProjectPropertyGroupElement>(StringComparer.Ordinal);
            if (cloneName != null)
            {
                // Find the configuration to clone
                foreach (var currentConfig in configGroup)
                {
                    // Only care about conditional property groups
                    if (currentConfig.Condition == null || currentConfig.Condition.Length == 0)
                        continue;
                    var configCanonicalName = ConfigCanonicalName.OfCondition(currentConfig.Condition);
                    // Skip if it isn't the group we want
                    if (String.Compare(configCanonicalName.ConfigName, cloneName, StringComparison.OrdinalIgnoreCase) != 0)
                        continue;
                    if (!configToClone.ContainsKey(configCanonicalName.Platform))
                        configToClone.Add(configCanonicalName.Platform, currentConfig);
                }
            }
            var platforms = GetPlatformsFromProject();
            if (platforms.Length == 0) platforms = new[] { String.Empty };
            // Create one property group per platform for the new configuration name.
            foreach (var platform in platforms)
            {
                // If we have any property groups to clone, and we do not have source for this platform, skip
                if (configToClone.Count > 0 && !configToClone.ContainsKey(platform)) continue;
                var newCanonicalName = new ConfigCanonicalName(name, platform);
                Microsoft.Build.Construction.ProjectPropertyGroupElement newConfig = null;
                if (configToClone.ContainsKey(platform))
                {
                    // Clone the configuration settings
                    newConfig = this.project.ClonePropertyGroup(configToClone[platform]);
                    //Will be added later with the new values to the path
                    foreach (Microsoft.Build.Construction.ProjectPropertyElement property in newConfig.Properties)
                    {
                        if (property.Name.Equals("OutputPath", StringComparison.OrdinalIgnoreCase))
                        {
                            property.Parent.RemoveChild(property);
                        }
                    }
                }
                else
                {
                    // no source to clone from, lets just create a new empty config
                    PopulateEmptyConfig(ref newConfig);
                    if (!String.IsNullOrEmpty(newCanonicalName.MSBuildPlatform))
                        newConfig.AddProperty(ProjectFileConstants.PlatformTarget, newCanonicalName.PlatformTarget);
                }
                //add the output path
                this.AddOutputPath(newConfig, name);
                // Set the condition that will define the new configuration
                string newCondition = newCanonicalName.ToMSBuildCondition();
                newConfig.Condition = newCondition;
            }
            NotifyOnCfgNameAdded(name);
            return VSConstants.S_OK;
        }
        // Creates a fresh property group seeded with the project system's custom
        // (name, condition) -> value properties from NewConfigProperties.
        private void PopulateEmptyConfig(ref Microsoft.Build.Construction.ProjectPropertyGroupElement newConfig)
        {
            newConfig = this.project.BuildProject.Xml.AddPropertyGroup();
            // Get the list of property name, condition value from the config provider
            IList<KeyValuePair<KeyValuePair<string, string>, string>> propVals = this.NewConfigProperties;
            foreach (KeyValuePair<KeyValuePair<string, string>, string> data in propVals)
            {
                KeyValuePair<string, string> propData = data.Key;
                string value = data.Value;
                Microsoft.Build.Construction.ProjectPropertyElement newProperty = newConfig.AddProperty(propData.Key, value);
                if (!String.IsNullOrEmpty(propData.Value))
                    newProperty.Condition = propData.Value;
            }
        }
        // Appends an OutputPath property of the form "<OutputBase>\<configName>\" to the group.
        private void AddOutputPath(Microsoft.Build.Construction.ProjectPropertyGroupElement newConfig, string configName)
        {
            //add the output path
            string outputBasePath = this.ProjectMgr.OutputBaseRelativePath;
            if (outputBasePath.EndsWith(Path.DirectorySeparatorChar.ToString(), StringComparison.Ordinal))
                outputBasePath = Path.GetDirectoryName(outputBasePath);
            newConfig.AddProperty("OutputPath", Path.Combine(outputBasePath, configName) + Path.DirectorySeparatorChar.ToString());
        }
        /// <summary>
        /// Copies an existing platform name or creates a new one.
        /// </summary>
        /// <param name="platformName">The name of the new platform.</param>
        /// <param name="clonePlatformName">The name of the platform to copy, or a null reference, indicating that AddCfgsOfPlatformName should create a new platform.</param>
        /// <returns>If the method succeeds, it returns S_OK. If it fails, it returns an error code.</returns>
        public virtual int AddCfgsOfPlatformName(string platformName, string clonePlatformName)
        {
            // We need to QE/QS the project file
            if (!this.ProjectMgr.QueryEditProjectFile(false))
            {
                throw Marshal.GetExceptionForHR(VSConstants.OLE_E_PROMPTSAVECANCELLED);
            }
            // Get all configs
            this.project.BuildProject.ReevaluateIfNecessary();
            List<Microsoft.Build.Construction.ProjectPropertyGroupElement> configGroup = new List<Microsoft.Build.Construction.ProjectPropertyGroupElement>(this.project.BuildProject.Xml.PropertyGroups);
            // configName -> property group
            var configToClone = new Dictionary<string, Microsoft.Build.Construction.ProjectPropertyGroupElement>(StringComparer.Ordinal);
            if (clonePlatformName != null)
            {
                // Find the configuration to clone
                foreach (var currentConfig in configGroup)
                {
                    // Only care about conditional property groups
                    if (currentConfig.Condition == null || currentConfig.Condition.Length == 0)
                        continue;
                    var configCanonicalName = ConfigCanonicalName.OfCondition(currentConfig.Condition);
                    // Skip if it isn't the group we want
                    if (!configCanonicalName.MatchesPlatform(clonePlatformName))
                        continue;
                    if (!configToClone.ContainsKey(configCanonicalName.ConfigName))
                        configToClone.Add(configCanonicalName.ConfigName, currentConfig);
                }
            }
            var configNames = GetPropertiesConditionedOn(ProjectFileConstants.Configuration);
            if (configNames.Length == 0) return VSConstants.E_FAIL;
            // Create one property group per configuration for the new platform name.
            foreach (var configName in configNames)
            {
                // If we have any property groups to clone, and we do not have source for this config, skip
                if (configToClone.Count > 0 && !configToClone.ContainsKey(configName)) continue;
                var newCanonicalName = new ConfigCanonicalName(configName, platformName);
                Microsoft.Build.Construction.ProjectPropertyGroupElement newConfig = null;
                if (configToClone.ContainsKey(configName))
                {
                    // Clone the configuration settings
                    newConfig = this.project.ClonePropertyGroup(configToClone[configName]);
                    // Drop the cloned PlatformTarget; it is re-added below with the new platform's value.
                    foreach (Microsoft.Build.Construction.ProjectPropertyElement property in newConfig.Properties)
                    {
                        if (property.Name.Equals(ProjectFileConstants.PlatformTarget, StringComparison.OrdinalIgnoreCase))
                        {
                            property.Parent.RemoveChild(property);
                        }
                    }
                }
                else
                {
                    // no source to clone from, lets just create a new empty config
                    PopulateEmptyConfig(ref newConfig);
                    this.AddOutputPath(newConfig, configName);
                }
                newConfig.AddProperty(ProjectFileConstants.PlatformTarget, newCanonicalName.PlatformTarget);
                // Set the condition that will define the new configuration
                string newCondition = newCanonicalName.ToMSBuildCondition();
                newConfig.Condition = newCondition;
            }
            NotifyOnPlatformNameAdded(platformName);
            return VSConstants.S_OK;
        }
        /// <summary>
        /// Deletes a specified configuration name.
        /// </summary>
        /// <param name="name">The name of the configuration to be deleted.</param>
        /// <returns>If the method succeeds, it returns S_OK. If it fails, it returns an error code. </returns>
        public virtual int DeleteCfgsOfCfgName(string name)
        {
            // We need to QE/QS the project file
            if (!this.ProjectMgr.QueryEditProjectFile(false))
            {
                throw Marshal.GetExceptionForHR(VSConstants.OLE_E_PROMPTSAVECANCELLED);
            }
            if (name == null)
            {
                Debug.Fail(String.Format(CultureInfo.CurrentCulture, "Name of the configuration should not be null if you want to delete it from project: {0}", MSBuildProject.GetFullPath((this.project.BuildProject))));
                // The configuration " '$(Configuration)' == " does not exist, so technically the goal
                // is achieved so return S_OK
                return VSConstants.S_OK;
            }
            this.project.BuildProject.ReevaluateIfNecessary();
            var configGroups = new List<Microsoft.Build.Construction.ProjectPropertyGroupElement>(this.project.BuildProject.Xml.PropertyGroups);
            var groupsToDelete = new List<Microsoft.Build.Construction.ProjectPropertyGroupElement>();
            // Collect first, then remove: deleting while enumerating PropertyGroups is unsafe.
            foreach (var config in configGroups)
            {
                var configCanonicalName = ConfigCanonicalName.OfCondition(config.Condition);
                if (configCanonicalName.MatchesConfigName(name))
                {
                    groupsToDelete.Add(config);
                    configurationsList.Remove(configCanonicalName);
                }
            }
            foreach (var group in groupsToDelete)
            {
                group.Parent.RemoveChild(group);
            }
            NotifyOnCfgNameDeleted(name);
            return VSConstants.S_OK;
        }
        /// <summary>
        /// Deletes a specified platform name.
        /// </summary>
        /// <param name="platName">The platform name to delete.</param>
        /// <returns>If the method succeeds, it returns S_OK. If it fails, it returns an error code.</returns>
        public virtual int DeleteCfgsOfPlatformName(string platName)
        {
            // We need to QE/QS the project file
            if (!this.ProjectMgr.QueryEditProjectFile(false))
            {
                throw Marshal.GetExceptionForHR(VSConstants.OLE_E_PROMPTSAVECANCELLED);
            }
            if (platName == null)
            {
                Debug.Fail(String.Format(CultureInfo.CurrentCulture, "Name of the platform should not be null if you want to delete it from project: {0}", MSBuildProject.GetFullPath((this.project.BuildProject))));
                return VSConstants.S_OK;
            }
            this.project.BuildProject.ReevaluateIfNecessary();
            var configGroups = new List<Microsoft.Build.Construction.ProjectPropertyGroupElement>(this.project.BuildProject.Xml.PropertyGroups);
            var groupsToDelete = new List<Microsoft.Build.Construction.ProjectPropertyGroupElement>();
            // Collect first, then remove: deleting while enumerating PropertyGroups is unsafe.
            foreach (var config in configGroups)
            {
                var configCanonicalName = ConfigCanonicalName.OfCondition(config.Condition);
                if (configCanonicalName.MatchesPlatform(platName))
                {
                    groupsToDelete.Add(config);
                    configurationsList.Remove(configCanonicalName);
                }
            }
            foreach (var group in groupsToDelete)
            {
                group.Parent.RemoveChild(group);
            }
            NotifyOnPlatformNameDeleted(platName);
            return VSConstants.S_OK;
        }
        /// <summary>
        /// Returns the existing configurations stored in the project file.
        /// </summary>
        /// <param name="celt">Specifies the requested number of property names. If this number is unknown, celt can be zero.</param>
        /// <param name="names">On input, an allocated array to hold the number of configuration property names specified by celt. This parameter can also be a null reference if the celt parameter is zero.
        /// On output, names contains configuration property names.</param>
        /// <param name="actual">The actual number of property names returned.</param>
        /// <returns>If the method succeeds, it returns S_OK. If it fails, it returns an error code.</returns>
        public virtual int GetCfgNames(uint celt, string[] names, uint[] actual)
        {
            // gets called twice, once for allocation, then for retrieval
            int i = 0;
            string[] configList = GetPropertiesConditionedOn(ProjectFileConstants.Configuration);
            if (configList.Length == 0)
            {
                // No conditional configurations in the project file; fall back to "Debug".
                configList = new[] { ProjectConfig.Debug };
            }
            if (names != null)
            {
                foreach (string config in configList)
                {
                    names[i++] = config;
                    if (i == celt)
                        break;
                }
            }
            else
                i = configList.Length;
            if (actual != null)
            {
                actual[0] = (uint)i;
            }
            return VSConstants.S_OK;
        }
        /// <summary>
        /// Returns the configuration associated with a specified configuration or platform name.
        /// </summary>
        /// <param name="name">The name of the configuration to be returned.</param>
        /// <param name="platName">The name of the platform for the configuration to be returned.</param>
        /// <param name="cfg">The implementation of the IVsCfg interface.</param>
        /// <returns>If the method succeeds, it returns S_OK. If it fails, it returns an error code.</returns>
        public virtual int GetCfgOfName(string name, string platName, out IVsCfg cfg)
        {
            cfg = null;
            cfg = this.GetProjectConfiguration(new ConfigCanonicalName(name, platName));
            return VSConstants.S_OK;
        }
        /// <summary>
        /// Returns a specified configuration property.
        /// </summary>
        /// <param name="propid">Specifies the property identifier for the property to return. For valid propid values, see __VSCFGPROPID.</param>
        /// <param name="var">The value of the property.</param>
        /// <returns>If the method succeeds, it returns S_OK. If it fails, it returns an error code.</returns>
        public virtual int GetCfgProviderProperty(int propid, out object var)
        {
            // This provider supports add/delete/rename of configurations and add/delete of platforms.
            var = false;
            switch ((__VSCFGPROPID)propid)
            {
                case __VSCFGPROPID.VSCFGPROPID_SupportsCfgAdd:
                    var = true;
                    break;
                case __VSCFGPROPID.VSCFGPROPID_SupportsCfgDelete:
                    var = true;
                    break;
                case __VSCFGPROPID.VSCFGPROPID_SupportsCfgRename:
                    var = true;
                    break;
                case __VSCFGPROPID.VSCFGPROPID_SupportsPlatformAdd:
                    var = true;
                    break;
                case __VSCFGPROPID.VSCFGPROPID_SupportsPlatformDelete:
                    var = true;
                    break;
            }
            return VSConstants.S_OK;
        }
        /// <summary>
        /// Returns the per-configuration objects for this object.
        /// </summary>
        /// <param name="celt">Number of configuration objects to be returned or zero, indicating a request for an unknown number of objects.</param>
        /// <param name="a">On input, pointer to an interface array or a null reference. On output, this parameter points to an array of IVsCfg interfaces belonging to the requested configuration objects.</param>
        /// <param name="actual">The number of configuration objects actually returned or a null reference, if this information is not necessary.</param>
        /// <param name="flags">Flags that specify settings for project configurations, or a null reference (Nothing in Visual Basic) if no additional flag settings are required. For valid prgrFlags values, see __VSCFGFLAGS.</param>
        /// <returns>If the method succeeds, it returns S_OK. If it fails, it returns an error code.</returns>
        public virtual int GetCfgs(uint celt, IVsCfg[] a, uint[] actual, uint[] flags)
        {
            if (flags != null)
                flags[0] = 0;
            int i = 0;
            string[] configList = GetPropertiesConditionedOn(ProjectFileConstants.Configuration);
            string[] platforms = GetPropertiesConditionedOn(ProjectFileConstants.Platform);
            if (configList.Length == 0)
            {
                configList = new[] { ProjectConfig.Debug };
            }
            if (platforms.Length == 0)
            {
                platforms = new[] { ProjectConfig.AnyCPU };
            }
            if (a != null)
            {
                // Fill the caller's array with the cross product of configurations and platforms,
                // stopping once celt entries have been written.
                foreach (string configName in configList)
                {
                    foreach (string platformName in platforms)
                    {
                        a[i] = this.GetProjectConfiguration(new ConfigCanonicalName(configName, platformName));
                        i++;
                        if (i == celt)
                            break;
                    }
                    if (i == celt)
                        break;
                }
            }
            else
                i = configList.Length * platforms.Length;
            if (actual != null)
                actual[0] = (uint)i;
            return VSConstants.S_OK;
        }
        /// <summary>
        /// Returns one or more platform names.
        /// </summary>
        /// <param name="celt">Specifies the requested number of platform names. If this number is unknown, celt can be zero.</param>
        /// <param name="names">On input, an allocated array to hold the number of platform names specified by celt. This parameter can also be a null reference if the celt parameter is zero. On output, names contains platform names.</param>
        /// <param name="actual">The actual number of platform names returned.</param>
        /// <returns>If the method succeeds, it returns S_OK. If it fails, it returns an error code.</returns>
        public virtual int GetPlatformNames(uint celt, string[] names, uint[] actual)
        {
            string[] platforms = this.GetPlatformsFromProject();
            return GetPlatforms(celt, names, actual, platforms);
        }
        /// <summary>
        /// Returns the set of platforms that are installed on the user's machine.
        /// </summary>
        /// <param name="celt">Specifies the requested number of supported platform names. If this number is unknown, celt can be zero.</param>
        /// <param name="names">On input, an allocated array to hold the number of names specified by celt. This parameter can also be a null reference (Nothing in Visual Basic)if the celt parameter is zero. On output, names contains the names of supported platforms</param>
        /// <param name="actual">The actual number of platform names returned.</param>
        /// <returns>If the method succeeds, it returns S_OK. If it fails, it returns an error code.</returns>
        public virtual int GetSupportedPlatformNames(uint celt, string[] names, uint[] actual)
        {
            string[] platforms = this.GetSupportedPlatformsFromProject();
            return GetPlatforms(celt, names, actual, platforms);
        }
        /// <summary>
        /// Assigns a new name to a configuration.
        /// </summary>
        /// <param name="old">The old name of the target configuration.</param>
        /// <param name="newname">The new name of the target configuration.</param>
        /// <returns>If the method succeeds, it returns S_OK. If it fails, it returns an error code.</returns>
        public virtual int RenameCfgsOfCfgName(string old, string newname)
        {
            this.project.BuildProject.ReevaluateIfNecessary();
            foreach (var config in this.project.BuildProject.Xml.PropertyGroups)
            {
                // Only care about conditional property groups
                if (config.Condition == null || config.Condition.Length == 0)
                    continue;
                var configCanonicalName = ConfigCanonicalName.OfCondition(config.Condition);
                // Skip if it isn't the group we want
                if (!configCanonicalName.MatchesConfigName(old))
                    continue;
                var newCanonicalName = new ConfigCanonicalName(newname, configCanonicalName.Platform);
                // Change the name
                config.Condition = newCanonicalName.ToMSBuildCondition();
                var propertyCollection = config.Properties;
                var outputPathProperty = propertyCollection.Where(p => p.Name == ProjectFileConstants.OutputPath).FirstOrDefault();
                if (outputPathProperty != null)
                {
                    // If the OutputPath still follows the default "<base>\<configName>" pattern,
                    // rename the directory segment along with the configuration.
                    string outputBasePath = this.ProjectMgr.OutputBaseRelativePath;
                    if (outputBasePath.EndsWith(Path.DirectorySeparatorChar.ToString(), StringComparison.Ordinal))
                        outputBasePath = Path.GetDirectoryName(outputBasePath);
                    var expectedOutputPathValue = Path.Combine(outputBasePath, old);
                    if (String.Equals(expectedOutputPathValue, outputPathProperty.Value, StringComparison.OrdinalIgnoreCase))
                    {
                        var newOutputPathValue = Path.Combine(outputBasePath, newname);
                        config.SetProperty(ProjectFileConstants.OutputPath, newOutputPathValue);
                    }
                }
                // Update the name in our config list
                if (configurationsList.ContainsKey(configCanonicalName))
                {
                    ProjectConfig configuration = configurationsList[configCanonicalName];
                    configurationsList.Remove(configCanonicalName);
                    configurationsList.Add(newCanonicalName, configuration);
                    // notify the configuration of its new name
                    configuration.ConfigName = newname;
                }
            }
            NotifyOnCfgNameRenamed(old, newname);
            return VSConstants.S_OK;
        }
        /// <summary>
        /// Cancels a registration for configuration event notification.
        /// </summary>
        /// <param name="cookie">The cookie used for registration.</param>
        /// <returns>If the method succeeds, it returns S_OK. If it fails, it returns an error code.</returns>
        public virtual int UnadviseCfgProviderEvents(uint cookie)
        {
            this.cfgEventSinks.RemoveAt(cookie);
            return VSConstants.S_OK;
        }
        /// <summary>
        /// Registers the caller for configuration event notification.
        /// </summary>
        /// <param name="sink">Reference to the IVsCfgProviderEvents interface to be called to provide notification of configuration events.</param>
        /// <param name="cookie">Reference to a token representing the completed registration</param>
        /// <returns>If the method succeeds, it returns S_OK. If it fails, it returns an error code.</returns>
        public virtual int AdviseCfgProviderEvents(IVsCfgProviderEvents sink, out uint cookie)
        {
            cookie = this.cfgEventSinks.Add(sink);
            return VSConstants.S_OK;
        }
        /// <summary>
        /// Provides access to an IDispatchable object being a list of configuration properties
        /// </summary>
        /// <param name="configurationName">Combined Name and Platform for the configuration requested</param>
        /// <param name="configurationProperties">The IDispatchable object</param>
        /// <returns>S_OK if successful</returns>
        public virtual int GetAutomationObject(string configurationName, out object configurationProperties)
        {
            //Init out param
            configurationProperties = null;
            var canonicalCfgName = new ConfigCanonicalName(configurationName);
            // Get the configuration
            IVsCfg cfg;
            ErrorHandler.ThrowOnFailure(this.GetCfgOfName(canonicalCfgName.ConfigName, canonicalCfgName.Platform, out cfg));
            // Get the properties of the configuration
            configurationProperties = ((ProjectConfig)cfg).ConfigurationProperties;
            return VSConstants.S_OK;
        }
        /// <summary>
        /// Called when a new configuration name was added.
        /// </summary>
        /// <param name="name">The name of configuration just added.</param>
        private void NotifyOnCfgNameAdded(string name)
        {
            foreach (IVsCfgProviderEvents sink in this.cfgEventSinks)
            {
                ErrorHandler.ThrowOnFailure(sink.OnCfgNameAdded(name));
            }
        }
        /// <summary>
        /// Called when a config name was deleted.
        /// </summary>
        /// <param name="name">The name of the configuration.</param>
        private void NotifyOnCfgNameDeleted(string name)
        {
            foreach (IVsCfgProviderEvents sink in this.cfgEventSinks)
            {
                ErrorHandler.ThrowOnFailure(sink.OnCfgNameDeleted(name));
            }
        }
        /// <summary>
        /// Called when a config name was renamed
        /// </summary>
        /// <param name="oldName">Old configuration name</param>
        /// <param name="newName">New configuration name</param>
        private void NotifyOnCfgNameRenamed(string oldName, string newName)
        {
            foreach (IVsCfgProviderEvents sink in this.cfgEventSinks)
            {
                ErrorHandler.ThrowOnFailure(sink.OnCfgNameRenamed(oldName, newName));
            }
        }
        /// <summary>
        /// Called when a platform name was added
        /// </summary>
        /// <param name="platformName">The name of the platform.</param>
        [SuppressMessage("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")]
        private void NotifyOnPlatformNameAdded(string platformName)
        {
            foreach (IVsCfgProviderEvents sink in this.cfgEventSinks)
            {
                ErrorHandler.ThrowOnFailure(sink.OnPlatformNameAdded(platformName));
            }
        }
        /// <summary>
        /// Called when a platform name was deleted
        /// </summary>
        /// <param name="platformName">The name of the platform.</param>
        [SuppressMessage("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")]
        private void NotifyOnPlatformNameDeleted(string platformName)
        {
            foreach (IVsCfgProviderEvents sink in this.cfgEventSinks)
            {
                ErrorHandler.ThrowOnFailure(sink.OnPlatformNameDeleted(platformName));
            }
        }
        /// <summary>
        /// Gets all the platforms defined in the project
        /// </summary>
        /// <returns>An array of platform names.</returns>
        private string[] GetPlatformsFromProject()
        {
            string[] platforms = GetPropertiesConditionedOn(ProjectFileConstants.Platform);
            if (platforms.Length == 0)
            {
                platforms = new[] { ProjectConfig.AnyCPU };
            }
            // Canonicalize each raw platform string (e.g. MSBuild vs display spelling).
            for (int i = 0; i < platforms.Length; i++)
            {
                platforms[i] = new ConfigCanonicalName("", platforms[i]).Platform;
            }
            return platforms;
        }
        /// <summary>
        /// Return the supported platform names.
        /// </summary>
        /// <returns>An array of supported platform names.</returns>
        private string[] GetSupportedPlatformsFromProject()
        {
            this.project.BuildProject.ReevaluateIfNecessary();
            string platforms = this.ProjectMgr.BuildProject.GetPropertyValue(ProjectFileConstants.AvailablePlatforms);
            if (platforms == null)
            {
                return new string[] { };
            }
            // AvailablePlatforms is a comma-separated list, e.g. "AnyCPU,x86,x64".
            if (platforms.Contains(","))
            {
                return platforms.Split(',');
            }
            return new string[] { platforms };
        }
        /// <summary>
        /// Common method for handling platform names.
        /// </summary>
        /// <param name="celt">Specifies the requested number of platform names. If this number is unknown, celt can be zero.</param>
        /// <param name="names">On input, an allocated array to hold the number of platform names specified by celt. This parameter can also be null if the celt parameter is zero. On output, names contains platform names</param>
        /// <param name="actual">A count of the actual number of platform names returned.</param>
        /// <param name="platforms">An array of available platform names</param>
        /// <returns>A count of the actual number of platform names returned.</returns>
        /// <devremark>The platforms array is never null. It is assured by the callers.</devremark>
        private static int GetPlatforms(uint celt, string[] names, uint[] actual, string[] platforms)
        {
            Debug.Assert(platforms != null, "The platforms array should never be null");
            if (names == null)
            {
                // Sizing call: caller only wants the count back in actual[0].
                if (actual == null || actual.Length == 0)
                {
                    throw new ArgumentException(SR.GetString(SR.InvalidParameter, CultureInfo.CurrentUICulture), "actual");
                }
                actual[0] = (uint)platforms.Length;
                return VSConstants.S_OK;
            }
            //Degenerate case
            if (celt == 0)
            {
                if (actual != null && actual.Length != 0)
                {
                    actual[0] = (uint)platforms.Length;
                }
                return VSConstants.S_OK;
            }
            uint returned = 0;
            for (int i = 0; i < platforms.Length && names.Length > returned; i++)
            {
                names[returned] = platforms[i];
                returned++;
            }
            if (actual != null && actual.Length != 0)
            {
                actual[0] = returned;
            }
            // COM convention: S_FALSE signals that fewer names were returned than requested.
            if (celt > returned)
            {
                return VSConstants.S_FALSE;
            }
            return VSConstants.S_OK;
        }
        /// <summary>
        /// Get all the configurations in the project.
        /// </summary>
        private string[] GetPropertiesConditionedOn(string constant)
        {
            List<string> configurations;
            this.project.BuildProject.ReevaluateIfNecessary();
            // ConditionedProperties maps a property name (e.g. "Configuration") to every
            // value it is compared against in the project file's conditions.
            this.project.BuildProject.ConditionedProperties.TryGetValue(constant, out configurations);
            return (configurations == null) ? new string[] { } : configurations.ToArray();
        }
    }
}
| syeerzy/visualfsharp | vsintegration/src/FSharp.ProjectSystem.Base/Project/ConfigProvider.cs | C# | mit | 39,397 |
// A 2-D position. `left`/`top` follow screen-coordinate conventions
// (x grows rightward, y grows downward).
export interface Point {
  left: number
  top: number
}
// An axis-aligned rectangle given by its four edge coordinates.
// Assumed non-degenerate by consumers: left <= right and top <= bottom.
export interface Rect {
  left: number
  right: number
  top: number
  bottom: number
}
// True when the point lies within the rectangle. The left/top edges are
// inclusive and the right/bottom edges are exclusive (half-open intervals).
export function pointInsideRect(point: Point, rect: Rect): boolean {
  const insideHorizontally = point.left >= rect.left && point.left < rect.right
  const insideVertically = point.top >= rect.top && point.top < rect.bottom
  return insideHorizontally && insideVertically
}
// Returns a new rectangle that is the intersection of the two rectangles. If they don't intersect, returns false
export function intersectRects(rect1: Rect, rect2: Rect): Rect | false {
  const left = Math.max(rect1.left, rect2.left)
  const right = Math.min(rect1.right, rect2.right)
  const top = Math.max(rect1.top, rect2.top)
  const bottom = Math.min(rect1.bottom, rect2.bottom)

  // A zero-width or zero-height overlap does not count as an intersection.
  if (left >= right || top >= bottom) {
    return false
  }
  return { left, right, top, bottom }
}
// Returns a copy of the rectangle shifted by the given x/y deltas
export function translateRect(rect: Rect, deltaX: number, deltaY: number): Rect {
  const { left, right, top, bottom } = rect
  return {
    left: left + deltaX,
    right: right + deltaX,
    top: top + deltaY,
    bottom: bottom + deltaY,
  }
}
// Returns a new point that will have been moved to reside within the given rectangle
export function constrainPoint(point: Point, rect: Rect): Point {
  const clamp = (val: number, min: number, max: number) => Math.min(Math.max(val, min), max)
  return {
    left: clamp(point.left, rect.left, rect.right),
    top: clamp(point.top, rect.top, rect.bottom),
  }
}
// Returns a point that is the center of the given rectangle
export function getRectCenter(rect: Rect): Point {
  const midX = (rect.left + rect.right) / 2
  const midY = (rect.top + rect.bottom) / 2
  return { left: midX, top: midY }
}
// Subtracts point2's coordinates from point1's coordinates, returning a delta
export function diffPoints(point1: Point, point2: Point): Point {
  const left = point1.left - point2.left
  const top = point1.top - point2.top
  return { left, top }
}
| fullcalendar/fullcalendar | packages/common/src/util/geom.ts | TypeScript | mit | 1,786 |
# Rails test-environment configuration. Loaded only when RAILS_ENV=test.
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # The test environment is used exclusively to run your application's
  # test suite. You never need to work with it otherwise. Remember that
  # your test database is "scratch space" for the test suite and is wiped
  # and recreated between test runs. Don't rely on the data there!
  config.cache_classes = true

  # Do not eager load code on boot. This avoids loading your whole application
  # just for the purpose of running a single test. If you are using a tool that
  # preloads Rails for running tests, you may have to set it to true.
  config.eager_load = false

  # Configure static asset server for tests with Cache-Control for performance.
  # NOTE(review): serve_static_assets was renamed serve_static_files in
  # Rails 4.2 and removed later - confirm against the app's Rails version.
  config.serve_static_assets = true
  config.static_cache_control = 'public, max-age=3600'

  # Show full error reports and disable caching.
  config.consider_all_requests_local = true
  config.action_controller.perform_caching = false

  # Raise exceptions instead of rendering exception templates.
  config.action_dispatch.show_exceptions = false

  # Disable request forgery protection in test environment.
  config.action_controller.allow_forgery_protection = false

  # Tell Action Mailer not to deliver emails to the real world.
  # The :test delivery method accumulates sent emails in the
  # ActionMailer::Base.deliveries array.
  config.action_mailer.delivery_method = :test

  # Print deprecation notices to the stderr.
  config.active_support.deprecation = :stderr

  # Raises error for missing translations
  # config.action_view.raise_on_missing_translations = true

  # Host used when generating absolute URLs in mailer views and assets.
  config.action_mailer.default_url_options = { :host => 'localhost:3000' }
  config.action_mailer.asset_host = 'http://localhost:3000'
end

# Default host for URL helpers used outside controllers/mailers (e.g. in tests).
Rails.application.routes.default_url_options[:host]= 'localhost:3000'
| centaurustech/unlock | config/environments/test.rb | Ruby | mit | 1,867 |
// Jasmine spec for MM.Layer (ModestMaps). Relies on the global MM namespace.
describe('Layer', function() {
  // Currently not testing subdomain-based templatedmapprovider, since
  // the implementation should be kind of undefined.
  it('layer can be created and destroyed', function() {
    var p = new MM.TemplatedLayer(
      'http://{S}.tile.openstreetmap.org/{Z}/{X}/{Y}.png', ['a']);
    var l = new MM.Layer(p);
    l.destroy();
    // destroy() is expected to detach the layer from its map.
    expect(l.map).toEqual(null);
  });
  // Currently not testing subdomain-based templatedmapprovider, since
  // the implementation should be kind of undefined.
  it('causes the map to throw requesterror when things are not accessible', function() {
    var manager, message, p;
    // A host that cannot resolve, so every tile request fails.
    var fourohfour = 'http://fffffffffffffffffffffffffffffffff.org/404.png';
    runs(function() {
      p = new MM.TemplatedLayer(fourohfour);
      // Capture the requesterror callback's arguments for later assertions.
      p.requestManager.addCallback('requesterror', function(a, b, c) {
        manager = a;
        message = b;
      });
      var m = new MM.Map(document.createElement('div'), p, { x: 500, y: 500 });
      m.setCenter({ lat: 0, lon: 0 }).setZoom(5);
    });
    // Allow time for the tile requests to fail asynchronously.
    waits(500);
    runs(function() {
      expect(manager).toEqual(p.requestManager);
      expect(jasmine.isDomNode(message.element)).toBeTruthy();
      expect(message.url).toEqual(fourohfour);
      expect(message.url).toEqual('http://fffffffffffffffffffffffffffffffff.org/404.png');
    });
  });
});
| nickchikore/Product-Developer-Test | node_modules/modestmaps/test/spec/Layer.js | JavaScript | mit | 1,490 |
// Generated CommonJS distribution build of Buefy's BMessage component.
// Do not hand-edit the compiled render function below.
'use strict';

Object.defineProperty(exports, '__esModule', { value: true });

// Side-effect and helper chunks emitted by the bundler.
require('./chunk-14c82365.js');
require('./helpers.js');
require('./chunk-cd0dcc1d.js');
require('./chunk-d7fda995.js');
var __chunk_5 = require('./chunk-13e039f5.js');
var __chunk_19 = require('./chunk-3b860353.js');

//
// Component options object compiled from the .vue single-file component.
var script = {
  name: 'BMessage',
  mixins: [__chunk_19.MessageMixin],
  props: {
    ariaCloseLabel: String
  },
  data: function data() {
    return {
      // Icon size falls back to the message size, then to 'is-large'.
      newIconSize: this.iconSize || this.size || 'is-large'
    };
  }
};

/* script */
const __vue_script__ = script;

/* template */
var __vue_render__ = function () {var _vm=this;var _h=_vm.$createElement;var _c=_vm._self._c||_h;return _c('transition',{attrs:{"name":"fade"}},[(_vm.isActive)?_c('article',{staticClass:"message",class:[_vm.type, _vm.size]},[(_vm.title)?_c('header',{staticClass:"message-header"},[_c('p',[_vm._v(_vm._s(_vm.title))]),_vm._v(" "),(_vm.closable)?_c('button',{staticClass:"delete",attrs:{"type":"button","aria-label":_vm.ariaCloseLabel},on:{"click":_vm.close}}):_vm._e()]):_vm._e(),_vm._v(" "),_c('section',{staticClass:"message-body"},[_c('div',{staticClass:"media"},[(_vm.computedIcon && _vm.hasIcon)?_c('div',{staticClass:"media-left"},[_c('b-icon',{class:_vm.type,attrs:{"icon":_vm.computedIcon,"pack":_vm.iconPack,"both":"","size":_vm.newIconSize}})],1):_vm._e(),_vm._v(" "),_c('div',{staticClass:"media-content"},[_vm._t("default")],2)])])]):_vm._e()])};
var __vue_staticRenderFns__ = [];

/* style */
const __vue_inject_styles__ = undefined;
/* scoped */
const __vue_scope_id__ = undefined;
/* module identifier */
const __vue_module_identifier__ = undefined;
/* functional template */
const __vue_is_functional_template__ = false;
/* style inject */

/* style inject SSR */

// Normalize the compiled parts into a registrable Vue component.
var Message = __chunk_5.__vue_normalize__(
  { render: __vue_render__, staticRenderFns: __vue_staticRenderFns__ },
  __vue_inject_styles__,
  __vue_script__,
  __vue_scope_id__,
  __vue_is_functional_template__,
  __vue_module_identifier__,
  undefined,
  undefined
);

// Vue plugin wrapper that registers the component globally on install.
var Plugin = {
  install: function install(Vue) {
    __chunk_5.registerComponent(Vue, Message);
  }
};

__chunk_5.use(Plugin);

exports.BMessage = Message;
exports.default = Plugin;
| cdnjs/cdnjs | ajax/libs/buefy/0.8.20/cjs/message.js | JavaScript | mit | 2,270 |
/// <reference path="./subject.ts" />
// Compiled output of anonymoussubject.ts; the pattern below only ensures
// the Rx namespace object exists.
var Rx;
(function (Rx) {
})(Rx || (Rx = {}));
// NOTE(review): this function expression is never invoked (no trailing
// call parentheses) - it only type-checks the AnonymousSubject constructor
// in the TypeScript source and is dead code at runtime.
(function () {
    var s = new Rx.AnonymousSubject();
});
//# sourceMappingURL=anonymoussubject.js.map | cyberpuffin/remanddel | node_modules/lite-server/node_modules/browser-sync/node_modules/rx/ts/core/subjects/anonymoussubject.js | JavaScript | mit | 186 |
/**
* @license Highcharts JS v8.2.2 (2020-10-22)
*
* Marker clusters module for Highcharts
*
* (c) 2010-2019 Wojciech Chmiel
*
* License: www.highcharts.com/license
*/
'use strict';
(function (factory) {
if (typeof module === 'object' && module.exports) {
factory['default'] = factory;
module.exports = factory;
} else if (typeof define === 'function' && define.amd) {
define('highcharts/modules/marker-clusters', ['highcharts'], function (Highcharts) {
factory(Highcharts);
factory.Highcharts = Highcharts;
return factory;
});
} else {
factory(typeof Highcharts !== 'undefined' ? Highcharts : undefined);
}
}(function (Highcharts) {
var _modules = Highcharts ? Highcharts._modules : {};
function _registerModule(obj, path, args, fn) {
if (!obj.hasOwnProperty(path)) {
obj[path] = fn.apply(null, args);
}
}
_registerModule(_modules, 'Extensions/MarkerClusters.js', [_modules['Core/Animation/AnimationUtilities.js'], _modules['Core/Series/Series.js'], _modules['Core/Chart/Chart.js'], _modules['Core/Globals.js'], _modules['Core/Options.js'], _modules['Core/Series/Point.js'], _modules['Core/Renderer/SVG/SVGRenderer.js'], _modules['Core/Utilities.js'], _modules['Core/Axis/Axis.js']], function (A, BaseSeries, Chart, H, O, Point, SVGRenderer, U, Axis) {
/* *
*
* Marker clusters module.
*
* (c) 2010-2020 Torstein Honsi
*
* Author: Wojciech Chmiel
*
* License: www.highcharts.com/license
*
* !!!!!!! SOURCE GETS TRANSPILED BY TYPESCRIPT. EDIT TS FILE ONLY. !!!!!!!
*
* */
var animObject = A.animObject;
var defaultOptions = O.defaultOptions;
var addEvent = U.addEvent,
defined = U.defined,
error = U.error,
isArray = U.isArray,
isFunction = U.isFunction,
isObject = U.isObject,
isNumber = U.isNumber,
merge = U.merge,
objectEach = U.objectEach,
relativeLength = U.relativeLength,
syncTimeout = U.syncTimeout;
/**
* Function callback when a cluster is clicked.
*
* @callback Highcharts.MarkerClusterDrillCallbackFunction
*
* @param {Highcharts.Point} this
* The point where the event occured.
*
* @param {Highcharts.PointClickEventObject} event
* Event arguments.
*/
''; // detach doclets from following code
/* eslint-disable no-invalid-this */
var Series = H.Series,
Scatter = BaseSeries.seriesTypes.scatter,
baseGeneratePoints = Series.prototype.generatePoints,
stateIdCounter = 0,
// Points that ids are included in the oldPointsStateId array
// are hidden before animation. Other ones are destroyed.
oldPointsStateId = [];
/**
* Options for marker clusters, the concept of sampling the data
* values into larger blocks in order to ease readability and
* increase performance of the JavaScript charts.
*
* Note: marker clusters module is not working with `boost`
* and `draggable-points` modules.
*
* The marker clusters feature requires the marker-clusters.js
* file to be loaded, found in the modules directory of the download
* package, or online at [code.highcharts.com/modules/marker-clusters.js
* ](code.highcharts.com/modules/marker-clusters.js).
*
* @sample maps/marker-clusters/europe
* Maps marker clusters
* @sample highcharts/marker-clusters/basic
* Scatter marker clusters
* @sample maps/marker-clusters/optimized-kmeans
* Marker clusters with colorAxis
*
* @product highcharts highmaps
* @since 8.0.0
* @optionparent plotOptions.scatter.cluster
*
* @private
*/
var clusterDefaultOptions = {
/**
* Whether to enable the marker-clusters module.
*
* @sample maps/marker-clusters/basic
* Maps marker clusters
* @sample highcharts/marker-clusters/basic
* Scatter marker clusters
*/
enabled: false,
/**
* When set to `false` prevent cluster overlapping - this option
* works only when `layoutAlgorithm.type = "grid"`.
*
* @sample highcharts/marker-clusters/grid
* Prevent overlapping
*/
allowOverlap: true,
/**
* Options for the cluster marker animation.
* @type {boolean|Partial<Highcharts.AnimationOptionsObject>}
* @default { "duration": 500 }
*/
animation: {
/** @ignore-option */
duration: 500
},
/**
* Zoom the plot area to the cluster points range when a cluster is clicked.
*/
drillToCluster: true,
/**
* The minimum amount of points to be combined into a cluster.
* This value has to be greater or equal to 2.
*
* @sample highcharts/marker-clusters/basic
* At least three points in the cluster
*/
minimumClusterSize: 2,
/**
* Options for layout algorithm. Inside there
* are options to change the type of the algorithm,
gridSize,
* distance or iterations.
*/
layoutAlgorithm: {
/**
* Type of the algorithm used to combine points into a cluster.
* There are three available algorithms:
*
* 1) `grid` - grid-based clustering technique. Points are assigned
* to squares of set size depending on their position on the plot
* area. Points inside the grid square are combined into a cluster.
* The grid size can be controlled by `gridSize` property
* (grid size changes at certain zoom levels).
*
* 2) `kmeans` - based on K-Means clustering technique. In the
* first step,
points are divided using the grid method (distance
* property is a grid size) to find the initial amount of clusters.
* Next,
each point is classified by computing the distance between
* each cluster center and that point. When the closest cluster
* distance is lower than distance property set by a user the point
* is added to this cluster otherwise is classified as `noise`. The
* algorithm is repeated until each cluster center not change its
* previous position more than one pixel. This technique is more
* accurate but also more time consuming than the `grid` algorithm,
* especially for big datasets.
*
* 3) `optimizedKmeans` - based on K-Means clustering technique. This
* algorithm uses k-means algorithm only on the chart initialization
* or when chart extremes have greater range than on initialization.
* When a chart is redrawn the algorithm checks only clustered points
* distance from the cluster center and rebuild it when the point is
* spaced enough to be outside the cluster. It provides performance
* improvement and more stable clusters position yet can be used rather
* on small and sparse datasets.
*
* By default,
the algorithm depends on visible quantity of points
* and `kmeansThreshold`. When there are more visible points than the
* `kmeansThreshold` the `grid` algorithm is used,
otherwise `kmeans`.
*
* The custom clustering algorithm can be added by assigning a callback
* function as the type property. This function takes an array of
* `processedXData`,
`processedYData`,
`processedXData` indexes and
* `layoutAlgorithm` options as arguments and should return an object
* with grouped data.
*
* The algorithm should return an object like that:
* <pre>{
* clusterId1: [{
* x: 573,
* y: 285,
* index: 1 // point index in the data array
* }, {
* x: 521,
* y: 197,
* index: 2
* }],
* clusterId2: [{
* ...
* }]
* ...
* }</pre>
*
* `clusterId` (example above - unique id of a cluster or noise)
* is an array of points belonging to a cluster. If the
* array has only one point or fewer points than set in
* `cluster.minimumClusterSize` it won't be combined into a cluster.
*
* @sample maps/marker-clusters/optimized-kmeans
* Optimized K-Means algorithm
* @sample highcharts/marker-clusters/kmeans
* K-Means algorithm
* @sample highcharts/marker-clusters/grid
* Grid algorithm
* @sample maps/marker-clusters/custom-alg
* Custom algorithm
*
* @type {string|Function}
* @see [cluster.minimumClusterSize](#plotOptions.scatter.marker.cluster.minimumClusterSize)
* @apioption plotOptions.scatter.cluster.layoutAlgorithm.type
*/
/**
* When `type` is set to the `grid`,
* `gridSize` is a size of a grid square element either as a number
* defining pixels,
or a percentage defining a percentage
* of the plot area width.
*
* @type {number|string}
*/
gridSize: 50,
/**
* When `type` is set to `kmeans`,
* `iterations` are the number of iterations that this algorithm will be
* repeated to find clusters positions.
*
* @type {number}
* @apioption plotOptions.scatter.cluster.layoutAlgorithm.iterations
*/
/**
* When `type` is set to `kmeans`,
* `distance` is a maximum distance between point and cluster center
* so that this point will be inside the cluster. The distance
* is either a number defining pixels or a percentage
* defining a percentage of the plot area width.
*
* @type {number|string}
*/
distance: 40,
/**
* When `type` is set to `undefined` and there are more visible points
* than the kmeansThreshold the `grid` algorithm is used to find
* clusters,
otherwise `kmeans`. It ensures good performance on
* large datasets and better clusters arrangement after the zoom.
*/
kmeansThreshold: 100
},
/**
* Options for the cluster marker.
* @extends plotOptions.series.marker
* @excluding enabledThreshold,
states
* @type {Highcharts.PointMarkerOptionsObject}
*/
marker: {
/** @internal */
symbol: 'cluster',
/** @internal */
radius: 15,
/** @internal */
lineWidth: 0,
/** @internal */
lineColor: '#ffffff'
},
/**
* Fires when the cluster point is clicked and `drillToCluster` is enabled.
* One parameter,
`event`,
is passed to the function. The default action
* is to zoom to the cluster points range. This can be prevented
* by calling `event.preventDefault()`.
*
* @type {Highcharts.MarkerClusterDrillCallbackFunction}
* @product highcharts highmaps
* @see [cluster.drillToCluster](#plotOptions.scatter.marker.cluster.drillToCluster)
* @apioption plotOptions.scatter.cluster.events.drillToCluster
*/
/**
* An array defining zones within marker clusters.
*
* In styled mode,
the color zones are styled with the
* `.highcharts-cluster-zone-{n}` class,
or custom
* classed from the `className`
* option.
*
* @sample highcharts/marker-clusters/basic
* Marker clusters zones
* @sample maps/marker-clusters/custom-alg
* Zones on maps
*
* @type {Array<*>}
* @product highcharts highmaps
* @apioption plotOptions.scatter.cluster.zones
*/
/**
* Styled mode only. A custom class name for the zone.
*
* @sample highcharts/css/color-zones/
* Zones styled by class name
*
* @type {string}
* @apioption plotOptions.scatter.cluster.zones.className
*/
/**
* Settings for the cluster marker belonging to the zone.
*
* @see [cluster.marker](#plotOptions.scatter.cluster.marker)
* @extends plotOptions.scatter.cluster.marker
* @product highcharts highmaps
* @apioption plotOptions.scatter.cluster.zones.marker
*/
/**
* The value where the zone starts.
*
* @type {number}
* @product highcharts highmaps
* @apioption plotOptions.scatter.cluster.zones.from
*/
/**
* The value where the zone ends.
*
* @type {number}
* @product highcharts highmaps
* @apioption plotOptions.scatter.cluster.zones.to
*/
/**
* The fill color of the cluster marker in hover state. When
* `undefined`,
the series' or point's fillColor for normal
* state is used.
*
* @type {Highcharts.ColorType}
* @apioption plotOptions.scatter.cluster.states.hover.fillColor
*/
/**
* Options for the cluster data labels.
* @type {Highcharts.DataLabelsOptions}
*/
dataLabels: {
/** @internal */
enabled: true,
/** @internal */
format: '{point.clusterPointsAmount}',
/** @internal */
verticalAlign: 'middle',
/** @internal */
align: 'center',
/** @internal */
style: {
color: 'contrast'
},
/** @internal */
inside: true
}
};
(defaultOptions.plotOptions || {}).series = merge((defaultOptions.plotOptions || {}).series, {
cluster: clusterDefaultOptions,
tooltip: {
/**
* The HTML of the cluster point's in the tooltip. Works only with
* marker-clusters module and analogously to
* [pointFormat](#tooltip.pointFormat).
*
* The cluster tooltip can be also formatted using
* `tooltip.formatter` callback function and `point.isCluster` flag.
*
* @sample highcharts/marker-clusters/grid
* Format tooltip for cluster points.
*
* @sample maps/marker-clusters/europe/
* Format tooltip for clusters using tooltip.formatter
*
* @apioption tooltip.clusterFormat
*/
clusterFormat: '<span>Clustered points: ' +
'{point.clusterPointsAmount}</span><br/>'
}
});
// Utils.
/* eslint-disable require-jsdoc */
function getClusterPosition(points) {
var pointsLen = points.length,
sumX = 0,
sumY = 0,
i;
for (i = 0; i < pointsLen; i++) {
sumX += points[i].x;
sumY += points[i].y;
}
return {
x: sumX / pointsLen,
y: sumY / pointsLen
};
}
// Prepare array with sorted data objects to be
// compared in getPointsState method.
function getDataState(clusteredData, stateDataLen) {
var state = [];
state.length = stateDataLen;
clusteredData.clusters.forEach(function (cluster) {
cluster.data.forEach(function (elem) {
state[elem.dataIndex] = elem;
});
});
clusteredData.noise.forEach(function (noise) {
state[noise.data[0].dataIndex] = noise.data[0];
});
return state;
}
function fadeInElement(elem, opacity, animation) {
elem
.attr({
opacity: opacity
})
.animate({
opacity: 1
}, animation);
}
function fadeInStatePoint(stateObj, opacity, animation, fadeinGraphic, fadeinDataLabel) {
if (stateObj.point) {
if (fadeinGraphic && stateObj.point.graphic) {
stateObj.point.graphic.show();
fadeInElement(stateObj.point.graphic, opacity, animation);
}
if (fadeinDataLabel && stateObj.point.dataLabel) {
stateObj.point.dataLabel.show();
fadeInElement(stateObj.point.dataLabel, opacity, animation);
}
}
}
function hideStatePoint(stateObj, hideGraphic, hideDataLabel) {
if (stateObj.point) {
if (hideGraphic && stateObj.point.graphic) {
stateObj.point.graphic.hide();
}
if (hideDataLabel && stateObj.point.dataLabel) {
stateObj.point.dataLabel.hide();
}
}
}
function destroyOldPoints(oldState) {
if (oldState) {
objectEach(oldState, function (state) {
if (state.point && state.point.destroy) {
state.point.destroy();
}
});
}
}
function fadeInNewPointAndDestoryOld(newPointObj, oldPoints, animation, opacity) {
// Fade in new point.
fadeInStatePoint(newPointObj, opacity, animation, true, true);
// Destroy old animated points.
oldPoints.forEach(function (p) {
if (p.point && p.point.destroy) {
p.point.destroy();
}
});
}
// Generate unique stateId for a state element.
function getStateId() {
return Math.random().toString(36).substring(2, 7) + '-' + stateIdCounter++;
}
// Useful for debugging.
// function drawGridLines(
// series: Highcharts.Series,
// options: Highcharts.MarkerClusterLayoutAlgorithmOptions
// ): void {
// var chart = series.chart,
// xAxis = series.xAxis,
// yAxis = series.yAxis,
// xAxisLen = series.xAxis.len,
// yAxisLen = series.yAxis.len,
// i, j, elem, text,
// currentX = 0,
// currentY = 0,
// scaledGridSize = 50,
// gridX = 0,
// gridY = 0,
// gridOffset = series.getGridOffset(),
// mapXSize, mapYSize;
// if (series.debugGridLines && series.debugGridLines.length) {
// series.debugGridLines.forEach(function (gridItem): void {
// if (gridItem && gridItem.destroy) {
// gridItem.destroy();
// }
// });
// }
// series.debugGridLines = [];
// scaledGridSize = series.getScaledGridSize(options);
// mapXSize = Math.abs(
// xAxis.toPixels(xAxis.dataMax || 0) -
// xAxis.toPixels(xAxis.dataMin || 0)
// );
// mapYSize = Math.abs(
// yAxis.toPixels(yAxis.dataMax || 0) -
// yAxis.toPixels(yAxis.dataMin || 0)
// );
// gridX = Math.ceil(mapXSize / scaledGridSize);
// gridY = Math.ceil(mapYSize / scaledGridSize);
// for (i = 0; i < gridX; i++) {
// currentX = i * scaledGridSize;
// if (
// gridOffset.plotLeft + currentX >= 0 &&
// gridOffset.plotLeft + currentX < xAxisLen
// ) {
// for (j = 0; j < gridY; j++) {
// currentY = j * scaledGridSize;
// if (
// gridOffset.plotTop + currentY >= 0 &&
// gridOffset.plotTop + currentY < yAxisLen
// ) {
// if (j % 2 === 0 && i % 2 === 0) {
// var rect = chart.renderer
// .rect(
// gridOffset.plotLeft + currentX,
// gridOffset.plotTop + currentY,
// scaledGridSize * 2,
// scaledGridSize * 2
// )
// .attr({
// stroke: series.color,
// 'stroke-width': '2px'
// })
// .add()
// .toFront();
// series.debugGridLines.push(rect);
// }
// elem = chart.renderer
// .rect(
// gridOffset.plotLeft + currentX,
// gridOffset.plotTop + currentY,
// scaledGridSize,
// scaledGridSize
// )
// .attr({
// stroke: series.color,
// opacity: 0.3,
// 'stroke-width': '1px'
// })
// .add()
// .toFront();
// text = chart.renderer
// .text(
// j + '-' + i,
// gridOffset.plotLeft + currentX + 2,
// gridOffset.plotTop + currentY + 7
// )
// .css({
// fill: 'rgba(0, 0, 0, 0.7)',
// fontSize: '7px'
// })
// .add()
// .toFront();
// series.debugGridLines.push(elem);
// series.debugGridLines.push(text);
// }
// }
// }
// }
// }
/* eslint-enable require-jsdoc */
// Cluster symbol.
SVGRenderer.prototype.symbols.cluster = function (x, y, width, height) {
var w = width / 2,
h = height / 2,
outerWidth = 1,
space = 1,
inner,
outer1,
outer2;
inner = this.arc(x + w, y + h, w - space * 4, h - space * 4, {
start: Math.PI * 0.5,
end: Math.PI * 2.5,
open: false
});
outer1 = this.arc(x + w, y + h, w - space * 3, h - space * 3, {
start: Math.PI * 0.5,
end: Math.PI * 2.5,
innerR: w - outerWidth * 2,
open: false
});
outer2 = this.arc(x + w, y + h, w - space, h - space, {
start: Math.PI * 0.5,
end: Math.PI * 2.5,
innerR: w,
open: false
});
return outer2.concat(outer1, inner);
};
Scatter.prototype.animateClusterPoint = function (clusterObj) {
var series = this,
xAxis = series.xAxis,
yAxis = series.yAxis,
chart = series.chart,
clusterOptions = series.options.cluster,
animation = animObject((clusterOptions || {}).animation),
animDuration = animation.duration || 500,
pointsState = (series.markerClusterInfo || {}).pointsState,
newState = (pointsState || {}).newState,
oldState = (pointsState || {}).oldState,
parentId,
oldPointObj,
newPointObj,
oldPoints = [],
newPointBBox,
offset = 0,
newX = 0,
newY = 0,
isOldPointGrahic = false,
isCbHandled = false;
if (oldState && newState) {
newPointObj = newState[clusterObj.stateId];
newX = xAxis.toPixels(newPointObj.x) - chart.plotLeft;
newY = yAxis.toPixels(newPointObj.y) - chart.plotTop;
// Point has one ancestor.
if (newPointObj.parentsId.length === 1) {
parentId = (newState || {})[clusterObj.stateId].parentsId[0];
oldPointObj = oldState[parentId];
// If old and new poistions are the same do not animate.
if (newPointObj.point &&
newPointObj.point.graphic &&
oldPointObj &&
oldPointObj.point &&
oldPointObj.point.plotX &&
oldPointObj.point.plotY &&
oldPointObj.point.plotX !== newPointObj.point.plotX &&
oldPointObj.point.plotY !== newPointObj.point.plotY) {
newPointBBox = newPointObj.point.graphic.getBBox();
offset = newPointBBox.width / 2;
newPointObj.point.graphic.attr({
x: oldPointObj.point.plotX - offset,
y: oldPointObj.point.plotY - offset
});
newPointObj.point.graphic.animate({
x: newX - (newPointObj.point.graphic.radius || 0),
y: newY - (newPointObj.point.graphic.radius || 0)
}, animation, function () {
isCbHandled = true;
// Destroy old point.
if (oldPointObj.point && oldPointObj.point.destroy) {
oldPointObj.point.destroy();
}
});
// Data label animation.
if (newPointObj.point.dataLabel &&
newPointObj.point.dataLabel.alignAttr &&
oldPointObj.point.dataLabel &&
oldPointObj.point.dataLabel.alignAttr) {
newPointObj.point.dataLabel.attr({
x: oldPointObj.point.dataLabel.alignAttr.x,
y: oldPointObj.point.dataLabel.alignAttr.y
});
newPointObj.point.dataLabel.animate({
x: newPointObj.point.dataLabel.alignAttr.x,
y: newPointObj.point.dataLabel.alignAttr.y
}, animation);
}
}
}
else if (newPointObj.parentsId.length === 0) {
// Point has no ancestors - new point.
// Hide new point.
hideStatePoint(newPointObj, true, true);
syncTimeout(function () {
// Fade in new point.
fadeInStatePoint(newPointObj, 0.1, animation, true, true);
}, animDuration / 2);
}
else {
// Point has many ancestors.
// Hide new point before animation.
hideStatePoint(newPointObj, true, true);
newPointObj.parentsId.forEach(function (elem) {
if (oldState && oldState[elem]) {
oldPointObj = oldState[elem];
oldPoints.push(oldPointObj);
if (oldPointObj.point &&
oldPointObj.point.graphic) {
isOldPointGrahic = true;
oldPointObj.point.graphic.show();
oldPointObj.point.graphic.animate({
x: newX - (oldPointObj.point.graphic.radius || 0),
y: newY - (oldPointObj.point.graphic.radius || 0),
opacity: 0.4
}, animation, function () {
isCbHandled = true;
fadeInNewPointAndDestoryOld(newPointObj, oldPoints, animation, 0.7);
});
if (oldPointObj.point.dataLabel &&
oldPointObj.point.dataLabel.y !== -9999 &&
newPointObj.point &&
newPointObj.point.dataLabel &&
newPointObj.point.dataLabel.alignAttr) {
oldPointObj.point.dataLabel.show();
oldPointObj.point.dataLabel.animate({
x: newPointObj.point.dataLabel.alignAttr.x,
y: newPointObj.point.dataLabel.alignAttr.y,
opacity: 0.4
}, animation);
}
}
}
});
// Make sure point is faded in.
syncTimeout(function () {
if (!isCbHandled) {
fadeInNewPointAndDestoryOld(newPointObj, oldPoints, animation, 0.85);
}
}, animDuration);
if (!isOldPointGrahic) {
syncTimeout(function () {
fadeInNewPointAndDestoryOld(newPointObj, oldPoints, animation, 0.1);
}, animDuration / 2);
}
}
}
};
Scatter.prototype.getGridOffset = function () {
var series = this,
chart = series.chart,
xAxis = series.xAxis,
yAxis = series.yAxis,
plotLeft = 0,
plotTop = 0;
if (series.dataMinX && series.dataMaxX) {
plotLeft = xAxis.reversed ?
xAxis.toPixels(series.dataMaxX) : xAxis.toPixels(series.dataMinX);
}
else {
plotLeft = chart.plotLeft;
}
if (series.dataMinY && series.dataMaxY) {
plotTop = yAxis.reversed ?
yAxis.toPixels(series.dataMinY) : yAxis.toPixels(series.dataMaxY);
}
else {
plotTop = chart.plotTop;
}
return { plotLeft: plotLeft, plotTop: plotTop };
};
Scatter.prototype.getScaledGridSize = function (options) {
var series = this,
xAxis = series.xAxis,
search = true,
k = 1,
divider = 1,
processedGridSize = options.processedGridSize ||
clusterDefaultOptions.layoutAlgorithm.gridSize,
gridSize,
scale,
level;
if (!series.gridValueSize) {
series.gridValueSize = Math.abs(xAxis.toValue(processedGridSize) - xAxis.toValue(0));
}
gridSize = xAxis.toPixels(series.gridValueSize) - xAxis.toPixels(0);
scale = +(processedGridSize / gridSize).toFixed(14);
// Find the level and its divider.
while (search && scale !== 1) {
level = Math.pow(2, k);
if (scale > 0.75 && scale < 1.25) {
search = false;
}
else if (scale >= (1 / level) && scale < 2 * (1 / level)) {
search = false;
divider = level;
}
else if (scale <= level && scale > level / 2) {
search = false;
divider = 1 / level;
}
k++;
}
return (processedGridSize / divider) / scale;
};
Scatter.prototype.getRealExtremes = function () {
var _a,
_b;
var series = this,
chart = series.chart,
xAxis = series.xAxis,
yAxis = series.yAxis,
realMinX = xAxis ? xAxis.toValue(chart.plotLeft) : 0,
realMaxX = xAxis ?
xAxis.toValue(chart.plotLeft + chart.plotWidth) : 0,
realMinY = yAxis ? yAxis.toValue(chart.plotTop) : 0,
realMaxY = yAxis ?
yAxis.toValue(chart.plotTop + chart.plotHeight) : 0;
if (realMinX > realMaxX) {
_a = [realMinX, realMaxX], realMaxX = _a[0], realMinX = _a[1];
}
if (realMinY > realMaxY) {
_b = [realMinY, realMaxY], realMaxY = _b[0], realMinY = _b[1];
}
return {
minX: realMinX,
maxX: realMaxX,
minY: realMinY,
maxY: realMaxY
};
};
Scatter.prototype.onDrillToCluster = function (event) {
var point = event.point || event.target;
point.firePointEvent('drillToCluster', event, function (e) {
var _a,
_b;
var point = e.point || e.target,
series = point.series,
xAxis = point.series.xAxis,
yAxis = point.series.yAxis,
chart = point.series.chart,
clusterOptions = series.options.cluster,
drillToCluster = (clusterOptions || {}).drillToCluster,
offsetX,
offsetY,
sortedDataX,
sortedDataY,
minX,
minY,
maxX,
maxY;
if (drillToCluster && point.clusteredData) {
sortedDataX = point.clusteredData.map(function (data) {
return data.x;
}).sort(function (a, b) { return a - b; });
sortedDataY = point.clusteredData.map(function (data) {
return data.y;
}).sort(function (a, b) { return a - b; });
minX = sortedDataX[0];
maxX = sortedDataX[sortedDataX.length - 1];
minY = sortedDataY[0];
maxY = sortedDataY[sortedDataY.length - 1];
offsetX = Math.abs((maxX - minX) * 0.1);
offsetY = Math.abs((maxY - minY) * 0.1);
chart.pointer.zoomX = true;
chart.pointer.zoomY = true;
// Swap when minus values.
if (minX > maxX) {
_a = [maxX, minX], minX = _a[0], maxX = _a[1];
}
if (minY > maxY) {
_b = [maxY, minY], minY = _b[0], maxY = _b[1];
}
chart.zoom({
originalEvent: e,
xAxis: [{
axis: xAxis,
min: minX - offsetX,
max: maxX + offsetX
}],
yAxis: [{
axis: yAxis,
min: minY - offsetY,
max: maxY + offsetY
}]
});
}
});
};
Scatter.prototype.getClusterDistancesFromPoint = function (clusters, pointX, pointY) {
var series = this,
xAxis = series.xAxis,
yAxis = series.yAxis,
pointClusterDistance = [],
j,
distance;
for (j = 0; j < clusters.length; j++) {
distance = Math.sqrt(Math.pow(xAxis.toPixels(pointX) -
xAxis.toPixels(clusters[j].posX), 2) +
Math.pow(yAxis.toPixels(pointY) -
yAxis.toPixels(clusters[j].posY), 2));
pointClusterDistance.push({
clusterIndex: j,
distance: distance
});
}
return pointClusterDistance.sort(function (a, b) { return a.distance - b.distance; });
};
// Point state used when animation is enabled to compare
// and bind old points with new ones.
Scatter.prototype.getPointsState = function (clusteredData, oldMarkerClusterInfo, dataLength) {
    // Builds a map keyed by state id describing every cluster/noise entry of
    // the new clustering, and links each entry to the state ids of the
    // previous clustering (parentsId) so animation can morph old -> new.
    var oldDataStateArr = oldMarkerClusterInfo ?
            getDataState(oldMarkerClusterInfo, dataLength) : [],
        newDataStateArr = getDataState(clusteredData, dataLength),
        state = {},
        newState,
        oldState,
        i;
    // Clear global array before populate with new ids.
    oldPointsStateId = [];
    // Build points state structure.
    clusteredData.clusters.forEach(function (cluster) {
        state[cluster.stateId] = {
            x: cluster.x,
            y: cluster.y,
            id: cluster.stateId,
            point: cluster.point,
            parentsId: []
        };
    });
    clusteredData.noise.forEach(function (noise) {
        state[noise.stateId] = {
            x: noise.x,
            y: noise.y,
            id: noise.stateId,
            point: noise.point,
            parentsId: []
        };
    });
    // Bind new and old state: both arrays are indexed by original data
    // index, so position i refers to the same source data point.
    for (i = 0; i < newDataStateArr.length; i++) {
        newState = newDataStateArr[i];
        oldState = oldDataStateArr[i];
        if (newState &&
            oldState &&
            newState.parentStateId &&
            oldState.parentStateId &&
            state[newState.parentStateId] &&
            state[newState.parentStateId].parentsId.indexOf(oldState.parentStateId) === -1) {
            // Remember which old cluster/noise entry this point came from.
            state[newState.parentStateId].parentsId.push(oldState.parentStateId);
            // Track old state ids globally so hideClusteredData can hide
            // (rather than destroy) points still used by the animation.
            if (oldPointsStateId.indexOf(oldState.parentStateId) === -1) {
                oldPointsStateId.push(oldState.parentStateId);
            }
        }
    }
    return state;
};
Scatter.prototype.markerClusterAlgorithms = {
    // Grid algorithm: divide the plot area into square cells of the scaled
    // grid size and group points by the cell they fall into. Returns an
    // object keyed by 'gridY-gridX' with arrays of { dataIndex, x, y }.
    grid: function (dataX, dataY, dataIndexes, options) {
        var series = this,
            xAxis = series.xAxis,
            yAxis = series.yAxis,
            grid = {},
            gridOffset = series.getGridOffset(),
            scaledGridSize,
            x,
            y,
            gridX,
            gridY,
            key,
            i;
        // drawGridLines(series, options);
        scaledGridSize = series.getScaledGridSize(options);
        for (i = 0; i < dataX.length; i++) {
            // Translate data values to pixel coordinates relative to the
            // grid origin (top-left of the plot area).
            x = xAxis.toPixels(dataX[i]) - gridOffset.plotLeft;
            y = yAxis.toPixels(dataY[i]) - gridOffset.plotTop;
            gridX = Math.floor(x / scaledGridSize);
            gridY = Math.floor(y / scaledGridSize);
            key = gridY + '-' + gridX;
            if (!grid[key]) {
                grid[key] = [];
            }
            grid[key].push({
                dataIndex: dataIndexes[i],
                x: dataX[i],
                y: dataY[i]
            });
        }
        return grid;
    },
    // K-means algorithm: seed cluster centers from a grid grouping, then
    // iteratively assign points to the nearest center (within
    // processedDistance pixels, everything else becomes noise) and
    // recompute centers until positions stabilize or the configured
    // iteration count is reached.
    kmeans: function (dataX, dataY, dataIndexes, options) {
        var series = this,
            clusters = [],
            noise = [],
            group = {},
            pointMaxDistance = options.processedDistance ||
                clusterDefaultOptions.layoutAlgorithm.distance,
            iterations = options.iterations,
            // Max pixel difference between new and old cluster position.
            maxClusterShift = 1,
            currentIteration = 0,
            repeat = true,
            pointX = 0,
            pointY = 0,
            tempPos,
            pointClusterDistance = [],
            groupedData,
            key,
            i,
            j;
        options.processedGridSize = options.processedDistance;
        // Use grid method to get groupedData object.
        groupedData = series.markerClusterAlgorithms ?
            series.markerClusterAlgorithms.grid.call(series, dataX, dataY, dataIndexes, options) : {};
        // Find clusters amount and its start positions
        // based on grid grouped data.
        for (key in groupedData) {
            if (groupedData[key].length > 1) {
                tempPos = getClusterPosition(groupedData[key]);
                clusters.push({
                    posX: tempPos.x,
                    posY: tempPos.y,
                    oldX: 0,
                    oldY: 0,
                    startPointsLen: groupedData[key].length,
                    points: []
                });
            }
        }
        // Start kmeans iteration process.
        while (repeat) {
            // Reset assignments from the previous iteration.
            clusters.map(function (c) {
                c.points.length = 0;
                return c;
            });
            noise.length = 0;
            // Assign every point to the closest cluster, or to noise when
            // the closest center is farther than pointMaxDistance.
            for (i = 0; i < dataX.length; i++) {
                pointX = dataX[i];
                pointY = dataY[i];
                pointClusterDistance = series.getClusterDistancesFromPoint(clusters, pointX, pointY);
                if (pointClusterDistance.length &&
                    pointClusterDistance[0].distance < pointMaxDistance) {
                    clusters[pointClusterDistance[0].clusterIndex].points.push({
                        x: pointX,
                        y: pointY,
                        dataIndex: dataIndexes[i]
                    });
                }
                else {
                    noise.push({
                        x: pointX,
                        y: pointY,
                        dataIndex: dataIndexes[i]
                    });
                }
            }
            // When cluster points array has only one point the
            // point should be classified again.
            for (j = 0; j < clusters.length; j++) {
                if (clusters[j].points.length === 1) {
                    pointClusterDistance = series.getClusterDistancesFromPoint(clusters, clusters[j].points[0].x, clusters[j].points[0].y);
                    // NOTE(review): assumes at least two clusters exist;
                    // with a single cluster pointClusterDistance[1] is
                    // undefined and this would throw - confirm callers
                    // guarantee this.
                    if (pointClusterDistance[1].distance < pointMaxDistance) {
                        // Add point to the next closest cluster.
                        clusters[pointClusterDistance[1].clusterIndex].points
                            .push(clusters[j].points[0]);
                        // Clear points array.
                        clusters[pointClusterDistance[0].clusterIndex]
                            .points.length = 0;
                    }
                }
            }
            // Compute a new clusters position and check if it
            // is different than the old one.
            repeat = false;
            for (j = 0; j < clusters.length; j++) {
                tempPos = getClusterPosition(clusters[j].points);
                clusters[j].oldX = clusters[j].posX;
                clusters[j].oldY = clusters[j].posY;
                clusters[j].posX = tempPos.x;
                clusters[j].posY = tempPos.y;
                // Repeat the algorithm if at least one cluster
                // is shifted more than maxClusterShift property.
                if (clusters[j].posX > clusters[j].oldX + maxClusterShift ||
                    clusters[j].posX < clusters[j].oldX - maxClusterShift ||
                    clusters[j].posY > clusters[j].oldY + maxClusterShift ||
                    clusters[j].posY < clusters[j].oldY - maxClusterShift) {
                    repeat = true;
                }
            }
            // If iterations property is set repeat the algorithm
            // specified amount of times.
            if (iterations) {
                repeat = currentIteration < iterations - 1;
            }
            currentIteration++;
        }
        clusters.forEach(function (cluster, i) {
            group['cluster' + i] = cluster.points;
        });
        noise.forEach(function (noise, i) {
            group['noise' + i] = [noise];
        });
        return group;
    },
    // Optimized k-means: run the full k-means only on the first pass or
    // when the view extends beyond the extremes cached at init time;
    // otherwise reuse the previous clusters and only reclassify points
    // that drifted outside their cluster radius.
    optimizedKmeans: function (processedXData, processedYData, dataIndexes, options) {
        var series = this,
            xAxis = series.xAxis,
            yAxis = series.yAxis,
            pointMaxDistance = options.processedDistance ||
                clusterDefaultOptions.layoutAlgorithm.gridSize,
            group = {},
            extremes = series.getRealExtremes(),
            clusterMarkerOptions = (series.options.cluster || {}).marker,
            offset,
            distance,
            radius;
        if (!series.markerClusterInfo || (series.initMaxX && series.initMaxX < extremes.maxX ||
            series.initMinX && series.initMinX > extremes.minX ||
            series.initMaxY && series.initMaxY < extremes.maxY ||
            series.initMinY && series.initMinY > extremes.minY)) {
            // First run or zoomed/panned outside the cached extremes:
            // recompute clusters from scratch.
            series.initMaxX = extremes.maxX;
            series.initMinX = extremes.minX;
            series.initMaxY = extremes.maxY;
            series.initMinY = extremes.minY;
            group = series.markerClusterAlgorithms ?
                series.markerClusterAlgorithms.kmeans.call(series, processedXData, processedYData, dataIndexes, options) : {};
            series.baseClusters = null;
        }
        else {
            if (!series.baseClusters) {
                series.baseClusters = {
                    clusters: series.markerClusterInfo.clusters,
                    noise: series.markerClusterInfo.noise
                };
            }
            series.baseClusters.clusters.forEach(function (cluster) {
                cluster.pointsOutside = [];
                cluster.pointsInside = [];
                cluster.data.forEach(function (dataPoint) {
                    // Pixel distance from the data point to its cluster
                    // center.
                    distance = Math.sqrt(Math.pow(xAxis.toPixels(dataPoint.x) -
                        xAxis.toPixels(cluster.x), 2) +
                        Math.pow(yAxis.toPixels(dataPoint.y) -
                            yAxis.toPixels(cluster.y), 2));
                    // Resolve the cluster marker radius: zone options take
                    // precedence, then series cluster marker, then defaults.
                    if (cluster.clusterZone &&
                        cluster.clusterZone.marker &&
                        cluster.clusterZone.marker.radius) {
                        radius = cluster.clusterZone.marker.radius;
                    }
                    else if (clusterMarkerOptions &&
                        clusterMarkerOptions.radius) {
                        radius = clusterMarkerOptions.radius;
                    }
                    else {
                        radius = clusterDefaultOptions.marker.radius;
                    }
                    offset = pointMaxDistance - radius >= 0 ?
                        pointMaxDistance - radius : radius;
                    // Split the cluster data into points that still belong
                    // to it and points now outside its radius.
                    if (distance > radius + offset &&
                        defined(cluster.pointsOutside)) {
                        cluster.pointsOutside.push(dataPoint);
                    }
                    else if (defined(cluster.pointsInside)) {
                        cluster.pointsInside.push(dataPoint);
                    }
                });
                if (cluster.pointsInside.length) {
                    group[cluster.id] = cluster.pointsInside;
                }
                // Each escaped point becomes its own noise group.
                cluster.pointsOutside.forEach(function (p, i) {
                    group[cluster.id + '_noise' + i] = [p];
                });
            });
            series.baseClusters.noise.forEach(function (noise) {
                group[noise.id] = noise.data;
            });
        }
        return group;
    }
};
Scatter.prototype.preventClusterCollisions = function (props) {
    // Shifts a grid-cluster position so its marker does not overlap markers
    // in neighbouring grid cells. Returns the adjusted { x, y } in axis
    // values and caches the result on groupedData[props.key].
    var series = this,
        xAxis = series.xAxis,
        yAxis = series.yAxis,
        // Grid key has the form 'gridY-gridX'.
        _a = props.key.split('-').map(parseFloat),
        gridY = _a[0],
        gridX = _a[1],
        gridSize = props.gridSize,
        groupedData = props.groupedData,
        defaultRadius = props.defaultRadius,
        clusterRadius = props.clusterRadius,
        gridXPx = gridX * gridSize,
        gridYPx = gridY * gridSize,
        xPixel = xAxis.toPixels(props.x),
        yPixel = yAxis.toPixels(props.y),
        gridsToCheckCollision = [],
        pointsLen = 0,
        radius = 0,
        clusterMarkerOptions = (series.options.cluster || {}).marker,
        zoneOptions = (series.options.cluster || {}).zones,
        gridOffset = series.getGridOffset(),
        nextXPixel,
        nextYPixel,
        signX,
        signY,
        cornerGridX,
        cornerGridY,
        i,
        j,
        itemX,
        itemY,
        nextClusterPos,
        maxDist,
        keys,
        x,
        y;
    // Distance to the grid start.
    xPixel -= gridOffset.plotLeft;
    yPixel -= gridOffset.plotTop;
    // Collect the neighbouring grid cells the cluster marker could overlap:
    // for each of the four marker corners, the diagonal cell plus its
    // horizontal and vertical neighbours.
    for (i = 1; i < 5; i++) {
        signX = i % 2 ? -1 : 1;
        signY = i < 3 ? -1 : 1;
        cornerGridX = Math.floor((xPixel + signX * clusterRadius) / gridSize);
        cornerGridY = Math.floor((yPixel + signY * clusterRadius) / gridSize);
        keys = [
            cornerGridY + '-' + cornerGridX,
            cornerGridY + '-' + gridX,
            gridY + '-' + cornerGridX
        ];
        for (j = 0; j < keys.length; j++) {
            if (gridsToCheckCollision.indexOf(keys[j]) === -1 &&
                keys[j] !== props.key) {
                gridsToCheckCollision.push(keys[j]);
            }
        }
    }
    gridsToCheckCollision.forEach(function (item) {
        var _a;
        if (groupedData[item]) {
            // Cluster or noise position is already computed.
            if (!groupedData[item].posX) {
                nextClusterPos = getClusterPosition(groupedData[item]);
                groupedData[item].posX = nextClusterPos.x;
                groupedData[item].posY = nextClusterPos.y;
            }
            nextXPixel = xAxis.toPixels(groupedData[item].posX || 0) -
                gridOffset.plotLeft;
            nextYPixel = yAxis.toPixels(groupedData[item].posY || 0) -
                gridOffset.plotTop;
            _a = item.split('-').map(parseFloat), itemY = _a[0], itemX = _a[1];
            // Resolve the neighbour's marker radius: zone options first,
            // then cluster marker options, then defaults.
            if (zoneOptions) {
                pointsLen = groupedData[item].length;
                for (i = 0; i < zoneOptions.length; i++) {
                    if (pointsLen >= zoneOptions[i].from &&
                        pointsLen <= zoneOptions[i].to) {
                        if (defined((zoneOptions[i].marker || {}).radius)) {
                            radius = zoneOptions[i].marker.radius || 0;
                        }
                        else if (clusterMarkerOptions &&
                            clusterMarkerOptions.radius) {
                            radius = clusterMarkerOptions.radius;
                        }
                        else {
                            radius = clusterDefaultOptions.marker.radius;
                        }
                    }
                }
            }
            if (groupedData[item].length > 1 &&
                radius === 0 &&
                clusterMarkerOptions &&
                clusterMarkerOptions.radius) {
                radius = clusterMarkerOptions.radius;
            }
            else if (groupedData[item].length === 1) {
                radius = defaultRadius;
            }
            maxDist = clusterRadius + radius;
            // Reset so the next neighbour resolves its own radius.
            radius = 0;
            // Too close horizontally: clamp the cluster inside its own grid
            // cell, on the side away from the colliding neighbour.
            if (itemX !== gridX &&
                Math.abs(xPixel - nextXPixel) < maxDist) {
                xPixel = itemX - gridX < 0 ? gridXPx + clusterRadius :
                    gridXPx + gridSize - clusterRadius;
            }
            // Same for the vertical direction.
            if (itemY !== gridY &&
                Math.abs(yPixel - nextYPixel) < maxDist) {
                yPixel = itemY - gridY < 0 ? gridYPx + clusterRadius :
                    gridYPx + gridSize - clusterRadius;
            }
        }
    });
    // Convert back to axis values and cache the final position.
    x = xAxis.toValue(xPixel + gridOffset.plotLeft);
    y = yAxis.toValue(yPixel + gridOffset.plotTop);
    groupedData[props.key].posX = x;
    groupedData[props.key].posY = y;
    return { x: x, y: y };
};
// Check if user algorithm result is valid groupedDataObject.
Scatter.prototype.isValidGroupedDataObject = function (groupedData) {
    // Valid input is a non-empty plain object whose every value is a
    // non-empty array of objects carrying x and y properties.
    //
    // Fix: the previous implementation reset its result flag to true at the
    // start of every objectEach callback, so the return value reflected only
    // the LAST enumerated group - an object mixing valid and invalid groups
    // could pass validation.
    var hasGroups = false,
        valid = true,
        i;
    if (!isObject(groupedData)) {
        return false;
    }
    objectEach(groupedData, function (elem) {
        hasGroups = true;
        if (!valid) {
            // A previous group already failed; skip further work.
            return;
        }
        if (!isArray(elem) || !elem.length) {
            valid = false;
            return;
        }
        for (i = 0; i < elem.length; i++) {
            // NOTE(review): falsy check rejects points with x === 0 or
            // y === 0 as well as missing coordinates - preserved from the
            // original validation; confirm whether zero coordinates should
            // be accepted before changing.
            if (!isObject(elem[i]) || (!elem[i].x || !elem[i].y)) {
                valid = false;
                return;
            }
        }
    });
    // An empty object is still invalid, as before.
    return hasGroups && valid;
};
Scatter.prototype.getClusteredData = function (groupedData, options) {
    // Converts the algorithm output (groupedData) into the final clustering
    // structure: cluster descriptors, noise points, the grouped x/y arrays
    // and a groupMap consumed by the grouped-data machinery. Returns false
    // when a custom algorithm produced invalid data.
    var series = this,
        groupedXData = [],
        groupedYData = [],
        clusters = [], // Container for clusters.
        noise = [], // Container for points not belonging to any cluster.
        groupMap = [],
        index = 0,
        // Prevent minimumClusterSize lower than 2.
        minimumClusterSize = Math.max(2, options.minimumClusterSize || 2),
        stateId,
        point,
        points,
        pointUserOptions,
        pointsLen,
        marker,
        clusterPos,
        pointOptions,
        clusterTempPos,
        zoneOptions,
        clusterZone,
        clusterZoneClassName,
        i,
        k;
    // Check if groupedData is valid when user uses a custom algorithm.
    if (isFunction(options.layoutAlgorithm.type) &&
        !series.isValidGroupedDataObject(groupedData)) {
        error('Highcharts marker-clusters module: ' +
            'The custom algorithm result is not valid!', false, series.chart);
        return false;
    }
    for (k in groupedData) {
        if (groupedData[k].length >= minimumClusterSize) {
            // Enough points in this group - build a cluster.
            points = groupedData[k];
            stateId = getStateId();
            pointsLen = points.length;
            // Get zone options for cluster (last matching zone wins).
            if (options.zones) {
                for (i = 0; i < options.zones.length; i++) {
                    if (pointsLen >= options.zones[i].from &&
                        pointsLen <= options.zones[i].to) {
                        clusterZone = options.zones[i];
                        clusterZone.zoneIndex = i;
                        zoneOptions = options.zones[i].marker;
                        clusterZoneClassName = options.zones[i].className;
                    }
                }
            }
            clusterTempPos = getClusterPosition(points);
            if (options.layoutAlgorithm.type === 'grid' &&
                !options.allowOverlap) {
                marker = series.options.marker || {};
                clusterPos = series.preventClusterCollisions({
                    x: clusterTempPos.x,
                    y: clusterTempPos.y,
                    key: k,
                    groupedData: groupedData,
                    gridSize: series.getScaledGridSize(options.layoutAlgorithm),
                    // NOTE(review): parses as marker.radius || (3 + lineWidth);
                    // if the intent was (marker.radius || 3) + lineWidth,
                    // parentheses are missing - confirm before changing.
                    defaultRadius: marker.radius || 3 + (marker.lineWidth || 0),
                    clusterRadius: (zoneOptions && zoneOptions.radius) ?
                        zoneOptions.radius :
                        (options.marker || {}).radius ||
                            clusterDefaultOptions.marker.radius
                });
            }
            else {
                clusterPos = {
                    x: clusterTempPos.x,
                    y: clusterTempPos.y
                };
            }
            // Tag every member point with the cluster's state id (used by
            // the animation state binding).
            for (i = 0; i < pointsLen; i++) {
                points[i].parentStateId = stateId;
            }
            clusters.push({
                x: clusterPos.x,
                y: clusterPos.y,
                id: k,
                stateId: stateId,
                index: index,
                data: points,
                clusterZone: clusterZone,
                clusterZoneClassName: clusterZoneClassName
            });
            groupedXData.push(clusterPos.x);
            groupedYData.push(clusterPos.y);
            groupMap.push({
                options: {
                    formatPrefix: 'cluster',
                    dataLabels: options.dataLabels,
                    marker: merge(options.marker, {
                        states: options.states
                    }, zoneOptions || {})
                }
            });
            // Save cluster data points options.
            if (series.options.data && series.options.data.length) {
                for (i = 0; i < pointsLen; i++) {
                    if (isObject(series.options.data[points[i].dataIndex])) {
                        points[i].options =
                            series.options.data[points[i].dataIndex];
                    }
                }
            }
            index++;
            // Reset so the next group resolves its own zone options.
            zoneOptions = null;
        }
        else {
            for (i = 0; i < groupedData[k].length; i++) {
                // Points not belonging to any cluster.
                point = groupedData[k][i];
                stateId = getStateId();
                pointOptions = null;
                pointUserOptions =
                    ((series.options || {}).data || [])[point.dataIndex];
                groupedXData.push(point.x);
                groupedYData.push(point.y);
                point.parentStateId = stateId;
                noise.push({
                    x: point.x,
                    y: point.y,
                    id: k,
                    stateId: stateId,
                    index: index,
                    data: groupedData[k]
                });
                // Keep the user's point configuration when it is an
                // options object (not an [x, y] array).
                if (pointUserOptions &&
                    typeof pointUserOptions === 'object' &&
                    !isArray(pointUserOptions)) {
                    pointOptions = merge(pointUserOptions, { x: point.x, y: point.y });
                }
                else {
                    pointOptions = {
                        userOptions: pointUserOptions,
                        x: point.x,
                        y: point.y
                    };
                }
                groupMap.push({ options: pointOptions });
                index++;
            }
        }
    }
    return {
        clusters: clusters,
        noise: noise,
        groupedXData: groupedXData,
        groupedYData: groupedYData,
        groupMap: groupMap
    };
};
// Destroy clustered data points.
Scatter.prototype.destroyClusteredData = function () {
    // Dispose of every previously generated grouped point, then drop the
    // reference so a fresh set can be built on the next pass.
    (this.markerClusterSeriesData || []).forEach(function (point) {
        if (point && point.destroy) {
            point.destroy();
        }
    });
    this.markerClusterSeriesData = null;
};
// Hide clustered data points.
Scatter.prototype.hideClusteredData = function () {
    // Hides or destroys the previously rendered grouped points: points that
    // still participate in the morph animation (their state id is listed in
    // oldPointsStateId) are only hidden so they can be reused; everything
    // else is destroyed.
    var series = this,
        clusteredSeriesData = this.markerClusterSeriesData,
        oldState = ((series.markerClusterInfo || {}).pointsState || {}).oldState || {},
        oldPointsId = oldPointsStateId.map(function (elem) {
            // Fix: guard against state ids with no entry in oldState -
            // previously oldState[elem].point threw a TypeError when the
            // old state was incomplete.
            return (((oldState[elem] || {}).point) || {}).id || '';
        });
    (clusteredSeriesData || []).forEach(function (point) {
        // If an old point is used in animation hide it, otherwise destroy.
        if (point &&
            oldPointsId.indexOf(point.id) !== -1) {
            if (point.graphic) {
                point.graphic.hide();
            }
            if (point.dataLabel) {
                point.dataLabel.hide();
            }
        }
        else if (point && point.destroy) {
            point.destroy();
        }
    });
};
// Override the generatePoints method by adding a reference to grouped data.
Scatter.prototype.generatePoints = function () {
    // Full clustering pipeline: crop data to the visible range, pick and run
    // a clustering algorithm, swap processedX/YData for the grouped values,
    // generate points, and wire up cluster points plus animation state.
    var series = this,
        chart = series.chart,
        xAxis = series.xAxis,
        yAxis = series.yAxis,
        clusterOptions = series.options.cluster,
        realExtremes = series.getRealExtremes(),
        visibleXData = [],
        visibleYData = [],
        visibleDataIndexes = [],
        oldPointsState,
        oldDataLen,
        oldMarkerClusterInfo,
        kmeansThreshold,
        cropDataOffsetX,
        cropDataOffsetY,
        seriesMinX,
        seriesMaxX,
        seriesMinY,
        seriesMaxY,
        type,
        algorithm,
        clusteredData,
        groupedData,
        layoutAlgOptions,
        point,
        i;
    // Clustering is skipped entirely on polar charts.
    if (clusterOptions &&
        clusterOptions.enabled &&
        series.xData &&
        series.yData &&
        !chart.polar) {
        type = clusterOptions.layoutAlgorithm.type;
        layoutAlgOptions = clusterOptions.layoutAlgorithm;
        // Get processed algorithm properties (gridSize/distance may be
        // percentages of the plot width).
        layoutAlgOptions.processedGridSize = relativeLength(layoutAlgOptions.gridSize ||
            clusterDefaultOptions.layoutAlgorithm.gridSize, chart.plotWidth);
        layoutAlgOptions.processedDistance = relativeLength(layoutAlgOptions.distance ||
            clusterDefaultOptions.layoutAlgorithm.distance, chart.plotWidth);
        kmeansThreshold = layoutAlgOptions.kmeansThreshold ||
            clusterDefaultOptions.layoutAlgorithm.kmeansThreshold;
        // Offset to prevent cluster size changes.
        cropDataOffsetX = Math.abs(xAxis.toValue(layoutAlgOptions.processedGridSize / 2) -
            xAxis.toValue(0));
        cropDataOffsetY = Math.abs(yAxis.toValue(layoutAlgOptions.processedGridSize / 2) -
            yAxis.toValue(0));
        // Get only visible data.
        for (i = 0; i < series.xData.length; i++) {
            // Track the overall data extremes until dataMaxX is cached.
            if (!series.dataMaxX) {
                if (!defined(seriesMaxX) ||
                    !defined(seriesMinX) ||
                    !defined(seriesMaxY) ||
                    !defined(seriesMinY)) {
                    seriesMaxX = seriesMinX = series.xData[i];
                    seriesMaxY = seriesMinY = series.yData[i];
                }
                else if (isNumber(series.yData[i]) &&
                    isNumber(seriesMaxY) &&
                    isNumber(seriesMinY)) {
                    seriesMaxX = Math.max(series.xData[i], seriesMaxX);
                    seriesMinX = Math.min(series.xData[i], seriesMinX);
                    seriesMaxY = Math.max(series.yData[i] || seriesMaxY, seriesMaxY);
                    seriesMinY = Math.min(series.yData[i] || seriesMinY, seriesMinY);
                }
            }
            // Crop data to visible ones with appropriate offset to prevent
            // cluster size changes on the edge of the plot area.
            if (series.xData[i] >= (realExtremes.minX - cropDataOffsetX) &&
                series.xData[i] <= (realExtremes.maxX + cropDataOffsetX) &&
                (series.yData[i] || realExtremes.minY) >=
                    (realExtremes.minY - cropDataOffsetY) &&
                (series.yData[i] || realExtremes.maxY) <=
                    (realExtremes.maxY + cropDataOffsetY)) {
                visibleXData.push(series.xData[i]);
                visibleYData.push(series.yData[i]);
                visibleDataIndexes.push(i);
            }
        }
        // Save data max values.
        if (defined(seriesMaxX) && defined(seriesMinX) &&
            isNumber(seriesMaxY) && isNumber(seriesMinY)) {
            series.dataMaxX = seriesMaxX;
            series.dataMinX = seriesMinX;
            series.dataMaxY = seriesMaxY;
            series.dataMinY = seriesMinY;
        }
        // Resolve the algorithm: a custom function, a named built-in, or
        // automatically kmeans (few points) vs grid (many points).
        if (isFunction(type)) {
            algorithm = type;
        }
        else if (series.markerClusterAlgorithms) {
            if (type && series.markerClusterAlgorithms[type]) {
                algorithm = series.markerClusterAlgorithms[type];
            }
            else {
                algorithm = visibleXData.length < kmeansThreshold ?
                    series.markerClusterAlgorithms.kmeans :
                    series.markerClusterAlgorithms.grid;
            }
        }
        else {
            algorithm = function () {
                return false;
            };
        }
        groupedData = algorithm.call(this, visibleXData, visibleYData, visibleDataIndexes, layoutAlgOptions);
        clusteredData = groupedData ? series.getClusteredData(groupedData, clusterOptions) : groupedData;
        // When animation is enabled get old points state.
        if (clusterOptions.animation &&
            series.markerClusterInfo &&
            series.markerClusterInfo.pointsState &&
            series.markerClusterInfo.pointsState.oldState) {
            // Destroy old points.
            destroyOldPoints(series.markerClusterInfo.pointsState.oldState);
            oldPointsState = series.markerClusterInfo.pointsState.newState;
        }
        else {
            oldPointsState = {};
        }
        // Save points old state info.
        oldDataLen = series.xData.length;
        oldMarkerClusterInfo = series.markerClusterInfo;
        if (clusteredData) {
            // Substitute the grouped values so the base implementation
            // generates one point per cluster/noise entry.
            series.processedXData = clusteredData.groupedXData;
            series.processedYData = clusteredData.groupedYData;
            series.hasGroupedData = true;
            series.markerClusterInfo = clusteredData;
            series.groupMap = clusteredData.groupMap;
        }
        baseGeneratePoints.apply(this);
        if (clusteredData && series.markerClusterInfo) {
            // Mark cluster points. Safe point reference in the cluster object.
            (series.markerClusterInfo.clusters || []).forEach(function (cluster) {
                point = series.points[cluster.index];
                point.isCluster = true;
                point.clusteredData = cluster.data;
                point.clusterPointsAmount = cluster.data.length;
                cluster.point = point;
                // Add zoom to cluster range.
                addEvent(point, 'click', series.onDrillToCluster);
            });
            // Safe point reference in the noise object.
            (series.markerClusterInfo.noise || []).forEach(function (noise) {
                noise.point = series.points[noise.index];
            });
            // When animation is enabled save points state.
            if (clusterOptions.animation &&
                series.markerClusterInfo) {
                series.markerClusterInfo.pointsState = {
                    oldState: oldPointsState,
                    newState: series.getPointsState(clusteredData, oldMarkerClusterInfo, oldDataLen)
                };
            }
            // Record grouped data in order to let it be destroyed the next time
            // processData runs.
            if (!clusterOptions.animation) {
                this.destroyClusteredData();
            }
            else {
                this.hideClusteredData();
            }
            this.markerClusterSeriesData =
                this.hasGroupedData ? this.points : null;
        }
    }
    else {
        // Clustering disabled or not applicable - default behavior.
        baseGeneratePoints.apply(this);
    }
};
// Handle animation.
addEvent(Chart, 'render', function () {
    // After each chart render, animate every cluster and noise point of
    // series that have clustering animation enabled.
    var chart = this;
    (chart.series || []).forEach(function (series) {
        if (series.markerClusterInfo) {
            var options = series.options.cluster,
                pointsState = (series.markerClusterInfo || {}).pointsState,
                oldState = (pointsState || {}).oldState;
            // Animate only when enabled, not while pinch-zooming or
            // panning, and only if an old state exists to animate from.
            if ((options || {}).animation &&
                series.markerClusterInfo &&
                series.chart.pointer.pinchDown.length === 0 &&
                (series.xAxis.eventArgs || {}).trigger !== 'pan' &&
                oldState &&
                Object.keys(oldState).length) {
                series.markerClusterInfo.clusters.forEach(function (cluster) {
                    series.animateClusterPoint(cluster);
                });
                series.markerClusterInfo.noise.forEach(function (noise) {
                    series.animateClusterPoint(noise);
                });
            }
        }
    });
});
// Override point prototype to throw a warning when trying to update
// clustered point.
addEvent(Point, 'update', function () {
    // Grouped points cannot be updated individually - warn and cancel.
    if (!this.dataGroup) {
        return;
    }
    error('Highcharts marker-clusters module: ' +
        'Running `Point.update` when point belongs to clustered series' +
        ' is not supported.', false, this.series.chart);
    return false;
});
// Destroy grouped data on series destroy.
addEvent(Series, 'destroy', Scatter.prototype.destroyClusteredData);
// Add classes, change mouse cursor.
addEvent(Series, 'afterRender', function () {
    // Tag rendered cluster graphics with CSS classes and, when
    // drillToCluster is enabled, switch the cursor to a pointer.
    var series = this,
        clusterZoomEnabled = (series.options.cluster || {}).drillToCluster;
    if (series.markerClusterInfo && series.markerClusterInfo.clusters) {
        series.markerClusterInfo.clusters.forEach(function (cluster) {
            if (cluster.point && cluster.point.graphic) {
                cluster.point.graphic.addClass('highcharts-cluster-point');
                // Change cursor to pointer when drillToCluster is enabled.
                if (clusterZoomEnabled && cluster.point) {
                    cluster.point.graphic.css({
                        cursor: 'pointer'
                    });
                    if (cluster.point.dataLabel) {
                        cluster.point.dataLabel.css({
                            cursor: 'pointer'
                        });
                    }
                }
                // Add the zone class so zone styling can be applied via CSS.
                if (defined(cluster.clusterZone)) {
                    cluster.point.graphic.addClass(cluster.clusterZoneClassName ||
                        'highcharts-cluster-zone-' +
                            cluster.clusterZone.zoneIndex);
                }
            }
        });
    }
});
// Forward the drillToCluster event to the user-supplied
// cluster.events.drillToCluster callback, if one is configured.
addEvent(Point, 'drillToCluster', function (event) {
    var eventPoint = event.point || event.target;
    var clusterOpts = eventPoint.series.options.cluster;
    var userCallback = ((clusterOpts || {}).events || {}).drillToCluster;
    if (isFunction(userCallback)) {
        userCallback.call(this, event);
    }
});
// Destroy the old tooltip after zoom.
addEvent(Axis, 'setExtremes', function () {
    // Wait for the cluster animation (if any series animates) before
    // destroying the tooltip, so it does not point at a stale cluster.
    var chart = this.chart;
    var animationDuration = 0;
    chart.series.forEach(function (series) {
        if (series.markerClusterInfo) {
            animationDuration =
                animObject((series.options.cluster || {}).animation).duration || 0;
        }
    });
    syncTimeout(function () {
        if (chart.tooltip) {
            chart.tooltip.destroy();
        }
    }, animationDuration);
});
});
_registerModule(_modules, 'masters/modules/marker-clusters.src.js', [], function () {
});
})); | cdnjs/cdnjs | ajax/libs/highcharts/8.2.2/modules/marker-clusters.src.js | JavaScript | mit | 81,899 |
package name.abuchen.portfolio.snapshot;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
import java.time.LocalDate;
import java.util.Locale;
import org.junit.Test;
/**
 * Tests that {@code Aggregation.Period.getStartDateFor} snaps arbitrary dates
 * to the start of the enclosing week, month, quarter and year.
 */
public class AggregationTest
{
    @Test
    public void testWeekly()
    {
        // The first day of a week is locale dependent, so pin the default
        // locale to Germany (weeks start on Monday) and restore it afterwards.
        Locale locale = Locale.getDefault();
        Locale.setDefault(Locale.GERMANY);
        try
        {
            assertThat(Aggregation.Period.WEEKLY.getStartDateFor(LocalDate.of(2011, 10, 17)), //
                            is(LocalDate.of(2011, 10, 17)));
            assertThat(Aggregation.Period.WEEKLY.getStartDateFor(LocalDate.of(2012, 1, 13)), //
                            is(LocalDate.of(2012, 1, 9)));
            assertThat(Aggregation.Period.WEEKLY.getStartDateFor(LocalDate.of(2012, 8, 10)), //
                            is(LocalDate.of(2012, 8, 6)));
            assertThat(Aggregation.Period.WEEKLY.getStartDateFor(LocalDate.of(2012, 9, 23)), //
                            is(LocalDate.of(2012, 9, 17)));
        }
        finally
        {
            Locale.setDefault(locale);
        }
    }

    @Test
    public void testMonthly()
    {
        assertThat(Aggregation.Period.MONTHLY.getStartDateFor(LocalDate.of(2011, 10, 17)), //
                        is(LocalDate.of(2011, 10, 1)));
        assertThat(Aggregation.Period.MONTHLY.getStartDateFor(LocalDate.of(2012, 1, 13)), //
                        is(LocalDate.of(2012, 1, 1)));
        assertThat(Aggregation.Period.MONTHLY.getStartDateFor(LocalDate.of(2012, 8, 10)), //
                        is(LocalDate.of(2012, 8, 1)));
        assertThat(Aggregation.Period.MONTHLY.getStartDateFor(LocalDate.of(2012, 9, 23)), //
                        is(LocalDate.of(2012, 9, 1)));
    }

    // Renamed from 'testQuartlerly' to fix the typo; JUnit discovers test
    // methods via the @Test annotation, so the rename is safe.
    @Test
    public void testQuarterly()
    {
        assertThat(Aggregation.Period.QUARTERLY.getStartDateFor(LocalDate.of(2011, 10, 17)), //
                        is(LocalDate.of(2011, 10, 1)));
        assertThat(Aggregation.Period.QUARTERLY.getStartDateFor(LocalDate.of(2012, 1, 13)), //
                        is(LocalDate.of(2012, 1, 1)));
        assertThat(Aggregation.Period.QUARTERLY.getStartDateFor(LocalDate.of(2012, 8, 10)), //
                        is(LocalDate.of(2012, 7, 1)));
        assertThat(Aggregation.Period.QUARTERLY.getStartDateFor(LocalDate.of(2012, 9, 23)), //
                        is(LocalDate.of(2012, 7, 1)));
    }

    @Test
    public void testYearly()
    {
        assertThat(Aggregation.Period.YEARLY.getStartDateFor(LocalDate.of(2011, 10, 17)), //
                        is(LocalDate.of(2011, 1, 1)));
        assertThat(Aggregation.Period.YEARLY.getStartDateFor(LocalDate.of(2012, 1, 13)), //
                        is(LocalDate.of(2012, 1, 1)));
        assertThat(Aggregation.Period.YEARLY.getStartDateFor(LocalDate.of(2012, 8, 10)), //
                        is(LocalDate.of(2012, 1, 1)));
        assertThat(Aggregation.Period.YEARLY.getStartDateFor(LocalDate.of(2012, 9, 23)), //
                        is(LocalDate.of(2012, 1, 1)));
    }
}
| cmaoling/portfolio | name.abuchen.portfolio.tests/src/name/abuchen/portfolio/snapshot/AggregationTest.java | Java | epl-1.0 | 3,118 |
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.binding.milight.internal.handler;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.openhab.binding.milight.internal.MilightThingState;
import org.openhab.binding.milight.internal.protocol.QueueItem;
import org.openhab.binding.milight.internal.protocol.QueuedSend;
import org.openhab.core.thing.Thing;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This class implements common functionality for Milight/Easybulb bulbs of protocol version 3.
* Most of the implementation is found in the specific bulb classes though.
* The class is state-less, use {@link MilightThingState} instead.
*
* @author David Graeff - Initial contribution
*/
@NonNullByDefault
public abstract class AbstractLedV3Handler extends AbstractLedHandler {
public static final int MAX_ANIM_MODES = 10;
protected final Logger logger = LoggerFactory.getLogger(AbstractLedV3Handler.class);
public AbstractLedV3Handler(Thing thing, QueuedSend sendQueue, int typeOffset) {
super(thing, sendQueue, typeOffset);
}
// we have to map [0,360] to [0,0xFF], where red equals hue=0 and the milight color 0xB0 (=176)
public static byte makeColor(int hue) {
int mHue = (360 + 248 - hue) % 360; // invert and shift
return (byte) (mHue * 255 / 360); // map to 256 values
}
@Override
public void setLedMode(int mode, MilightThingState state) {
// Not supported
}
@Override
public void setSaturation(int value, MilightThingState state) {
// Not supported
}
@Override
public void changeSaturation(int relativeSaturation, MilightThingState state) {
// Not supported
}
protected QueueItem createRepeatable(byte[] data) {
return QueueItem.createRepeatable(socket, delayTimeMS, repeatTimes, address, port, data);
}
protected QueueItem createRepeatable(int uidc, byte[] data) {
return new QueueItem(socket, uidc, data, true, delayTimeMS, repeatTimes, address, port);
}
protected QueueItem createNonRepeatable(byte[] data) {
return QueueItem.createNonRepeatable(socket, delayTimeMS, address, port, data);
}
}
| openhab/openhab2 | bundles/org.openhab.binding.milight/src/main/java/org/openhab/binding/milight/internal/handler/AbstractLedV3Handler.java | Java | epl-1.0 | 2,556 |
package de.desy.language.snl.parser.parser;
import java.util.regex.Matcher;
import de.desy.language.snl.codeElements.PredefinedTypes;
import de.desy.language.snl.parser.Interval;
import de.desy.language.snl.parser.nodes.VariableNode;
public class VariableParser extends
        AbstractOptimizedStatementParser<VariableNode> {

    public VariableParser(Interval[] exclusions) {
        super(exclusions);
    }

    @Override
    protected String getPrePatternString() {
        // Build an alternation of all predefined type names followed by
        // whitespace, e.g. "(int|long|...)(\s+)". EVFLAG is skipped.
        final PredefinedTypes[] types = PredefinedTypes.values();
        // NOTE(review): the first enum constant is appended unconditionally,
        // so it would be included even if it were EVFLAG - this mirrors the
        // previous behavior; confirm EVFLAG is never declared first.
        final StringBuilder alternatives = new StringBuilder(types[0].getElementName());
        for (int index = 1; index < types.length; index++) {
            if (!types[index].equals(PredefinedTypes.EVFLAG)) {
                alternatives.append("|").append(types[index].getElementName());
            }
        }
        return "(" + alternatives.toString() + ")(\\s+)";
    }

    @Override
    protected String getPatternString() {
        // Type, whitespace, identifier, optional array dimensions, semicolon.
        return getPrePatternString() + "([a-zA-Z_][0-9a-zA-Z_]*)(\\s*\\[\\s*\\d+\\s*\\])*"
                + getPostPatternString();
    }

    @Override
    protected String getPostPatternString() {
        return "(\\s*;)";
    }

    @Override
    protected void matchFound(final Matcher preMatcher,
            final Matcher mainMatcher) {
        this._statement = mainMatcher.group();
        this._startOffSet = mainMatcher.start();
        this._endOffSet = preMatcher.end() - 1;
        // Group 1: type name, group 3: variable name, group 4: array suffix.
        final String typeName = mainMatcher.group(1);
        final String variableName = mainMatcher.group(3);
        final boolean isArrayVariable = mainMatcher.group(4) != null;
        this._found = true;
        this._node = new VariableNode(variableName, typeName, this
                .getStartOffsetLastFound(), this.getEndOffsetLastFound(), isArrayVariable);
    }
}
| ControlSystemStudio/cs-studio | applications/snl/snl-plugins/de.desy.language.snl/src/de/desy/language/snl/parser/parser/VariableParser.java | Java | epl-1.0 | 1,943 |
/*
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*/
package org.opendaylight.controller.sal.core.api.notify;
import org.opendaylight.controller.sal.core.api.Broker;
import org.opendaylight.controller.sal.core.api.Provider;
import org.opendaylight.yangtools.yang.data.api.CompositeNode;
/**
 * Notification Publishing Service
 *
 * The simplified process of the notification publishing is the following:
 *
 * <ol>
 * <li>{@link Provider} invokes {@link #publish(CompositeNode)}</li>
 * <li>{@link Broker} finds {@link NotificationListener}s which subscribed for
 * the notification type.</li>
 * <li>For each subscriber {@link Broker} invokes
 * {@link NotificationListener#onNotification(CompositeNode)}</li>
 * </ol>
 */
public interface NotificationPublishService extends NotificationService {
    /**
     * Publishes a notification.
     *
     * The notification type is determined by the
     * {@link CompositeNode#getNodeType()} of the
     * <code>notification</code> parameter.
     *
     * @param notification
     *            Notification to publish
     */
    void publish(CompositeNode notification);
}
| yuyf10/opendaylight-controller | opendaylight/md-sal/sal-dom-api/src/main/java/org/opendaylight/controller/sal/core/api/notify/NotificationPublishService.java | Java | epl-1.0 | 1,374 |