code stringlengths 3 1.05M | repo_name stringlengths 4 116 | path stringlengths 4 991 | language stringclasses 9
values | license stringclasses 15
values | size int32 3 1.05M |
|---|---|---|---|---|---|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
* A service for maintaining form-related metadata and linking that data to
* corresponding controllers and templates.
*/
angular.module('form').provider('formService', function formServiceProvider() {

    /**
     * Reference to the provider itself.
     *
     * @type formServiceProvider
     */
    var provider = this;

    /**
     * Map of all registered field type definitions by name.
     *
     * @type Object.<String, FieldType>
     */
    this.fieldTypes = {

        /**
         * Text field type.
         *
         * @see {@link Field.Type.TEXT}
         * @type FieldType
         */
        'TEXT' : {
            module      : 'form',
            controller  : 'textFieldController',
            templateUrl : 'app/form/templates/textField.html'
        },

        /**
         * Email address field type.
         *
         * @see {@link Field.Type.EMAIL}
         * @type FieldType
         */
        'EMAIL' : {
            templateUrl : 'app/form/templates/emailField.html'
        },

        /**
         * Numeric field type.
         *
         * @see {@link Field.Type.NUMERIC}
         * @type FieldType
         */
        'NUMERIC' : {
            module      : 'form',
            controller  : 'numberFieldController',
            templateUrl : 'app/form/templates/numberField.html'
        },

        /**
         * Boolean field type.
         *
         * @see {@link Field.Type.BOOLEAN}
         * @type FieldType
         */
        'BOOLEAN' : {
            module      : 'form',
            controller  : 'checkboxFieldController',
            templateUrl : 'app/form/templates/checkboxField.html'
        },

        /**
         * Username field type. Identical in principle to a text field, but may
         * have different semantics.
         *
         * @see {@link Field.Type.USERNAME}
         * @type FieldType
         */
        'USERNAME' : {
            templateUrl : 'app/form/templates/textField.html'
        },

        /**
         * Password field type. Similar to a text field, but the contents of
         * the field are masked.
         *
         * @see {@link Field.Type.PASSWORD}
         * @type FieldType
         */
        'PASSWORD' : {
            module      : 'form',
            controller  : 'passwordFieldController',
            templateUrl : 'app/form/templates/passwordField.html'
        },

        /**
         * Enumerated field type. The user is presented a finite list of values
         * to choose from.
         *
         * @see {@link Field.Type.ENUM}
         * @type FieldType
         */
        'ENUM' : {
            module      : 'form',
            controller  : 'selectFieldController',
            templateUrl : 'app/form/templates/selectField.html'
        },

        /**
         * Multiline field type. The user may enter multiple lines of text.
         *
         * @see {@link Field.Type.MULTILINE}
         * @type FieldType
         */
        'MULTILINE' : {
            templateUrl : 'app/form/templates/textAreaField.html'
        },

        /**
         * Field type which allows selection of languages. The languages
         * displayed are the set of languages supported by the Guacamole web
         * application. Legal values are valid language IDs, as dictated by
         * the filenames of Guacamole's available translations.
         *
         * @see {@link Field.Type.LANGUAGE}
         * @type FieldType
         */
        'LANGUAGE' : {
            module      : 'form',
            controller  : 'languageFieldController',
            templateUrl : 'app/form/templates/languageField.html'
        },

        /**
         * Field type which allows selection of time zones.
         *
         * @see {@link Field.Type.TIMEZONE}
         * @type FieldType
         */
        'TIMEZONE' : {
            module      : 'form',
            controller  : 'timeZoneFieldController',
            templateUrl : 'app/form/templates/timeZoneField.html'
        },

        /**
         * Field type which allows selection of individual dates.
         *
         * @see {@link Field.Type.DATE}
         * @type FieldType
         */
        'DATE' : {
            module      : 'form',
            controller  : 'dateFieldController',
            templateUrl : 'app/form/templates/dateField.html'
        },

        /**
         * Field type which allows selection of times of day.
         *
         * @see {@link Field.Type.TIME}
         * @type FieldType
         */
        'TIME' : {
            module      : 'form',
            controller  : 'timeFieldController',
            templateUrl : 'app/form/templates/timeField.html'
        },

        /**
         * Field type which allows selection of color schemes accepted by the
         * Guacamole server terminal emulator and protocols which leverage it.
         *
         * @see {@link Field.Type.TERMINAL_COLOR_SCHEME}
         * @type FieldType
         */
        'TERMINAL_COLOR_SCHEME' : {
            module      : 'form',
            controller  : 'terminalColorSchemeFieldController',
            templateUrl : 'app/form/templates/terminalColorSchemeField.html'
        }

    };

    /**
     * Registers a new field type under the given name.
     *
     * @param {String} fieldTypeName
     *     The name which uniquely identifies the field type being registered.
     *
     * @param {FieldType} fieldType
     *     The field type definition to associate with the given name.
     */
    this.registerFieldType = function registerFieldType(fieldTypeName, fieldType) {

        // Store field type
        provider.fieldTypes[fieldTypeName] = fieldType;

    };

    // Factory method required by provider
    this.$get = ['$injector', function formServiceFactory($injector) {

        // Required services
        var $compile         = $injector.get('$compile');
        var $q               = $injector.get('$q');
        var $templateRequest = $injector.get('$templateRequest');

        var service = {};

        service.fieldTypes = provider.fieldTypes;

        /**
         * Compiles and links the field associated with the given name to the given
         * scope, producing a distinct and independent DOM Element which functions
         * as an instance of that field. The scope object provided must include at
         * least the following properties:
         *
         * namespace:
         *     A String which defines the unique namespace associated the
         *     translation strings used by the form using a field of this type.
         *
         * fieldId:
         *     A String value which is reasonably likely to be unique and may
         *     be used to associate the main element of the field with its
         *     label.
         *
         * field:
         *     The Field object that is being rendered, representing a field of
         *     this type.
         *
         * model:
         *     The current String value of the field, if any.
         *
         * disabled:
         *     A boolean value which is true if the field should be disabled.
         *     If false or undefined, the field should be enabled.
         *
         * @param {Element} fieldContainer
         *     The DOM Element whose contents should be replaced with the
         *     compiled field template.
         *
         * @param {String} fieldTypeName
         *     The name of the field type defining the nature of the element to be
         *     created.
         *
         * @param {Object} scope
         *     The scope to which the new element will be linked.
         *
         * @return {Promise.<Element>}
         *     A Promise which resolves to the compiled Element. If an error occurs
         *     while retrieving the field type, this Promise will be rejected.
         */
        service.insertFieldElement = function insertFieldElement(fieldContainer,
            fieldTypeName, scope) {

            // Ensure field type is defined
            var fieldType = provider.fieldTypes[fieldTypeName];
            if (!fieldType)
                return $q.reject();

            // Determine the source of the field's template: use the raw HTML
            // template if provided, fall back to retrieval by URL, and default
            // to an empty template otherwise. $q.when() wraps immediate values
            // in already-resolved promises, avoiding manual deferred plumbing.
            var templateRequest;
            if (fieldType.template)
                templateRequest = $q.when(fieldType.template);
            else if (fieldType.templateUrl)
                templateRequest = $templateRequest(fieldType.templateUrl);
            else
                templateRequest = $q.when('');

            // Defer compilation of template pending successful retrieval
            var compiledTemplate = $q.defer();

            // Resolve with compiled HTML upon success
            templateRequest.then(function templateRetrieved(html) {

                // Insert template into DOM
                fieldContainer.innerHTML = html;

                // Populate scope using defined controller. A dedicated
                // injector is built from the field's declared module so the
                // controller need not be registered with the main application
                // module. NOTE(review): this injector does not share the main
                // app's service instances - confirm that is intended.
                if (fieldType.module && fieldType.controller) {
                    var $controller = angular.injector(['ng', fieldType.module]).get('$controller');
                    $controller(fieldType.controller, {
                        '$scope'   : scope,
                        '$element' : angular.element(fieldContainer.childNodes)
                    });
                }

                // Compile DOM with populated scope
                compiledTemplate.resolve($compile(fieldContainer.childNodes)(scope));

            })

            // Reject on failure (bracket notation retained deliberately:
            // "catch" is a reserved word in pre-ES5 parsers)
            ['catch'](function templateError() {
                compiledTemplate.reject();
            });

            // Return promise which resolves to the compiled template
            return compiledTemplate.promise;

        };

        return service;

    }];

});
| jmuehlner/incubator-guacamole-client | guacamole/src/main/webapp/app/form/services/formService.js | JavaScript | apache-2.0 | 10,923 |
package com.cell.user.web.shrio.filter.authc;
import javax.servlet.ServletRequest;
import org.apache.shiro.SecurityUtils;
import org.apache.shiro.authc.AuthenticationException;
import org.apache.shiro.web.filter.authc.FormAuthenticationFilter;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import com.cell.user.vo.single.SysUserVo;
import com.cell.user.web.service.UserService;
/**
 * Form authentication filter customized in two ways:
 * <ul>
 * <li>{@code onLoginFailure} stores the {@link AuthenticationException}
 * itself in the request attributes, rather than only the exception class
 * name, so views can inspect the full failure.</li>
 * <li>On successful login, the redirect target depends on the authenticated
 * user: administrators are sent to the admin landing page, everyone else to
 * the regular landing page.</li>
 * </ul>
 */
public class CustomFormAuthenticationFilter extends FormAuthenticationFilter {

	@Autowired
	UserService userService;

	/** Landing page used for ordinary users after a successful login. */
	@Value("${shiro.default.success.url}")
	private String defaultSuccessUrl;

	/** Landing page used for administrators after a successful login. */
	@Value("${shiro.admin.default.success.url}")
	private String adminDefaultSuccessUrl;

	/**
	 * Exposes the full authentication exception to the view layer by placing
	 * it (not merely its class name) under the configured failure attribute
	 * key.
	 */
	@Override
	protected void setFailureAttribute(ServletRequest request,
			AuthenticationException ae) {
		request.setAttribute(getFailureKeyAttribute(), ae);
	}

	/**
	 * Chooses the post-login redirect URL based on the current principal.
	 *
	 * @return the admin landing page when the authenticated user exists and
	 *         is flagged as an administrator, otherwise the default landing
	 *         page
	 */
	@Override
	public String getSuccessUrl() {
		final String principal = (String) SecurityUtils.getSubject().getPrincipal();
		final SysUserVo currentUser = userService.findByUsername(principal);
		final boolean isAdmin = currentUser != null
				&& Boolean.TRUE.equals(currentUser.getAdmin());
		return isAdmin ? this.adminDefaultSuccessUrl : this.defaultSuccessUrl;
	}

}
| TonyYang9527/Magic-Web | src/main/java/com/cell/user/web/shrio/filter/authc/CustomFormAuthenticationFilter.java | Java | apache-2.0 | 1,780 |
/*
Copyright 2014 Google Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package proxy
import (
"fmt"
"io"
"net"
"time"
"github.com/GoogleCloudPlatform/kubernetes/pkg/api"
"github.com/golang/glog"
)
// Proxier is a simple proxy for tcp connections between a localhost:lport and services that provide
// the actual implementations.
type Proxier struct {
	// loadBalancer picks a backend endpoint for each accepted connection.
	loadBalancer LoadBalancer
	// serviceMap records the port each known service is bound to, keyed by
	// service ID. NOTE(review): access is not synchronized - confirm that
	// OnUpdate is never called concurrently with itself.
	serviceMap map[string]int
}
// NewProxier returns a Proxier that distributes incoming connections using
// the supplied load balancer, starting with no registered services.
func NewProxier(loadBalancer LoadBalancer) *Proxier {
	p := &Proxier{
		loadBalancer: loadBalancer,
		serviceMap:   make(map[string]int),
	}
	return p
}
// CopyBytes relays bytes from out into in until EOF or a hard error, then
// half-closes both ends (the read side of in, the write side of out) so the
// peer sees a clean shutdown for this direction.
func CopyBytes(in, out *net.TCPConn) {
	glog.Infof("Copying from %v <-> %v <-> %v <-> %v",
		in.RemoteAddr(), in.LocalAddr(), out.LocalAddr(), out.RemoteAddr())
	if _, err := io.Copy(in, out); err != nil && err != io.EOF {
		glog.Errorf("I/O error: %v", err)
	}
	in.CloseRead()
	out.CloseWrite()
}
// ProxyConnection wires two TCP connections together bidirectionally: bytes
// arriving on either connection are relayed to the other. One goroutine is
// spawned per direction; each half-closes its side when its copy finishes.
func ProxyConnection(in, out *net.TCPConn) {
	glog.Infof("Creating proxy between %v <-> %v <-> %v <-> %v",
		in.RemoteAddr(), in.LocalAddr(), out.LocalAddr(), out.RemoteAddr())
	go CopyBytes(in, out)
	go CopyBytes(out, in)
}
// AcceptHandler runs the accept loop for a single service: each incoming
// connection is matched to a backend endpoint via the load balancer and then
// proxied to it. The loop never returns; individual failures are logged and
// the loop moves on to the next connection.
func (proxier Proxier) AcceptHandler(service string, listener net.Listener) {
	for {
		clientConn, acceptErr := listener.Accept()
		if acceptErr != nil {
			glog.Errorf("Accept failed: %v", acceptErr)
			continue
		}
		glog.Infof("Accepted connection from: %v to %v", clientConn.RemoteAddr(), clientConn.LocalAddr())

		// Ask the load balancer where this request should go.
		endpoint, lbErr := proxier.loadBalancer.LoadBalance(service, clientConn.RemoteAddr())
		if lbErr != nil {
			glog.Errorf("Couldn't find an endpoint for %s %v", service, lbErr)
			clientConn.Close()
			continue
		}
		glog.Infof("Mapped service %s to endpoint %s", service, endpoint)

		backendConn, dialErr := net.DialTimeout("tcp", endpoint, time.Duration(5)*time.Second)
		if dialErr != nil {
			glog.Errorf("Dial failed: %v", dialErr)
			clientConn.Close()
			continue
		}

		// Shuffle bytes both ways between client and backend.
		go ProxyConnection(clientConn.(*net.TCPConn), backendConn.(*net.TCPConn))
	}
}
// AddService starts listening for a new service on a given port. The listen
// is attempted before anything else so a port conflict is reported to the
// caller instead of being discovered later by the accept loop.
func (proxier Proxier) AddService(service string, port int) error {
	listener, err := net.Listen("tcp", fmt.Sprintf(":%d", port))
	if err != nil {
		return err
	}
	proxier.addServiceCommon(service, listener)
	return nil
}
// addServiceOnUnusedPort starts listening for a new service on an ephemeral
// port chosen by the OS, returning the port it's using as a string. Intended
// for testing on a system with unknown ports used.
//
// Fix: the error returned by net.SplitHostPort was previously discarded
// (the function unconditionally returned nil), which could hand callers an
// empty port with no indication of failure. It is now propagated.
func (proxier Proxier) addServiceOnUnusedPort(service string) (string, error) {
	// Make sure we can start listening before saying all's well; ":0" asks
	// the OS for any free port.
	l, err := net.Listen("tcp", ":0")
	if err != nil {
		return "", err
	}
	proxier.addServiceCommon(service, l)
	_, port, err := net.SplitHostPort(l.Addr().String())
	if err != nil {
		return "", err
	}
	return port, nil
}
// addServiceCommon records the listen address for the service and spawns the
// accept loop that will serve it. Callers have already verified that the
// listener was created successfully.
func (proxier Proxier) addServiceCommon(service string, l net.Listener) {
	glog.Infof("Listening for %s on %s", service, l.Addr().String())
	go proxier.AcceptHandler(service, l)
}
// OnUpdate reconciles the proxier against a fresh service list: any service
// that is new, or whose port changed, gets a listener started and is recorded
// in serviceMap on success. Services already bound to the right port are left
// alone.
func (proxier Proxier) OnUpdate(services []api.Service) {
	glog.Infof("Received update notice: %+v", services)
	for _, service := range services {
		boundPort, known := proxier.serviceMap[service.ID]
		if known && boundPort == service.Port {
			// Already serving this service on the requested port.
			continue
		}
		glog.Infof("Adding a new service %s on port %d", service.ID, service.Port)
		if err := proxier.AddService(service.ID, service.Port); err != nil {
			glog.Infof("Failed to start listening for %s on %d", service.ID, service.Port)
			continue
		}
		proxier.serviceMap[service.ID] = service.Port
	}
}
| CaptTofu/kubernetes | pkg/proxy/proxier.go | GO | apache-2.0 | 4,258 |
#region S# License
/******************************************************************************************
NOTICE!!! This program and source code is owned and licensed by
StockSharp, LLC, www.stocksharp.com
Viewing or use of this code requires your acceptance of the license
agreement found at https://github.com/StockSharp/StockSharp/blob/master/LICENSE
Removal of this comment is a violation of the license agreement.
Project: StockSharp.Algo.Candles.Algo
File: Candle.cs
Created: 2015, 11, 11, 2:32 PM
Copyright 2010 by StockSharp, LLC
*******************************************************************************************/
#endregion S# License
namespace StockSharp.Algo.Candles
{
using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.Serialization;
using Ecng.Common;
using Ecng.Collections;
using StockSharp.BusinessEntities;
using StockSharp.Messages;
using StockSharp.Localization;
/// <summary>
/// Base candle class (contains main parameters).
/// </summary>
/// <remarks>
/// Concrete subclasses differ only in the meaning of <see cref="Arg"/>
/// (time-frame, tick count, volume, price range, etc.). Declared known types
/// keep DataContract serialization working for every subclass.
/// </remarks>
[DataContract]
[Serializable]
[KnownType(typeof(TickCandle))]
[KnownType(typeof(VolumeCandle))]
[KnownType(typeof(RangeCandle))]
[KnownType(typeof(TimeFrameCandle))]
[KnownType(typeof(PnFCandle))]
[KnownType(typeof(RenkoCandle))]
public abstract class Candle : Cloneable<Candle>
{
	/// <summary>
	/// Security.
	/// </summary>
	[DataMember]
	[DisplayNameLoc(LocalizedStrings.SecurityKey)]
	[DescriptionLoc(LocalizedStrings.SecurityKey, true)]
	public Security Security { get; set; }

	/// <summary>
	/// Open time.
	/// </summary>
	[DataMember]
	[DisplayNameLoc(LocalizedStrings.CandleOpenTimeKey)]
	[DescriptionLoc(LocalizedStrings.CandleOpenTimeKey, true)]
	public DateTimeOffset OpenTime { get; set; }

	/// <summary>
	/// Close time.
	/// </summary>
	[DataMember]
	[DisplayNameLoc(LocalizedStrings.CandleCloseTimeKey)]
	[DescriptionLoc(LocalizedStrings.CandleCloseTimeKey, true)]
	public DateTimeOffset CloseTime { get; set; }

	/// <summary>
	/// High time.
	/// </summary>
	[DataMember]
	[DisplayNameLoc(LocalizedStrings.CandleHighTimeKey)]
	[DescriptionLoc(LocalizedStrings.CandleHighTimeKey, true)]
	public DateTimeOffset HighTime { get; set; }

	/// <summary>
	/// Low time.
	/// </summary>
	[DataMember]
	[DisplayNameLoc(LocalizedStrings.CandleLowTimeKey)]
	[DescriptionLoc(LocalizedStrings.CandleLowTimeKey, true)]
	public DateTimeOffset LowTime { get; set; }

	/// <summary>
	/// Opening price.
	/// </summary>
	[DataMember]
	[DisplayNameLoc(LocalizedStrings.Str79Key)]
	[DescriptionLoc(LocalizedStrings.Str80Key)]
	public decimal OpenPrice { get; set; }

	/// <summary>
	/// Closing price.
	/// </summary>
	[DataMember]
	[DisplayNameLoc(LocalizedStrings.ClosingPriceKey)]
	[DescriptionLoc(LocalizedStrings.Str86Key)]
	public decimal ClosePrice { get; set; }

	/// <summary>
	/// Highest price.
	/// </summary>
	[DataMember]
	[DisplayNameLoc(LocalizedStrings.HighestPriceKey)]
	[DescriptionLoc(LocalizedStrings.Str82Key)]
	public decimal HighPrice { get; set; }

	/// <summary>
	/// Lowest price.
	/// </summary>
	[DataMember]
	[DisplayNameLoc(LocalizedStrings.LowestPriceKey)]
	[DescriptionLoc(LocalizedStrings.Str84Key)]
	public decimal LowPrice { get; set; }

	/// <summary>
	/// Total price size.
	/// </summary>
	[DataMember]
	[DisplayNameLoc(LocalizedStrings.TotalPriceKey)]
	public decimal TotalPrice { get; set; }

	/// <summary>
	/// Volume at open.
	/// </summary>
	/// <remarks>Null means the value is unknown.</remarks>
	[DataMember]
	[DisplayNameLoc(LocalizedStrings.OpenVolumeKey)]
	public decimal? OpenVolume { get; set; }

	/// <summary>
	/// Volume at close.
	/// </summary>
	/// <remarks>Null means the value is unknown.</remarks>
	[DataMember]
	[DisplayNameLoc(LocalizedStrings.CloseVolumeKey)]
	public decimal? CloseVolume { get; set; }

	/// <summary>
	/// Volume at high.
	/// </summary>
	/// <remarks>Null means the value is unknown.</remarks>
	[DataMember]
	[DisplayNameLoc(LocalizedStrings.HighVolumeKey)]
	public decimal? HighVolume { get; set; }

	/// <summary>
	/// Volume at low.
	/// </summary>
	/// <remarks>Null means the value is unknown.</remarks>
	[DataMember]
	[DisplayNameLoc(LocalizedStrings.LowVolumeKey)]
	public decimal? LowVolume { get; set; }

	/// <summary>
	/// Total volume.
	/// </summary>
	[DataMember]
	[DisplayNameLoc(LocalizedStrings.VolumeKey)]
	[DescriptionLoc(LocalizedStrings.TotalCandleVolumeKey)]
	public decimal TotalVolume { get; set; }

	/// <summary>
	/// Relative volume.
	/// </summary>
	[DataMember]
	[DisplayNameLoc(LocalizedStrings.RelativeVolumeKey)]
	public decimal? RelativeVolume { get; set; }

	/// <summary>
	/// Candle arg.
	/// </summary>
	/// <remarks>
	/// Each subclass maps this to its own strongly typed parameter
	/// (e.g. <c>TimeSpan</c> for time-frame candles).
	/// </remarks>
	public abstract object Arg { get; set; }

	/// <summary>
	/// Number of ticks.
	/// </summary>
	[DataMember]
	[DisplayNameLoc(LocalizedStrings.TicksKey)]
	[DescriptionLoc(LocalizedStrings.TickCountKey)]
	public int? TotalTicks { get; set; }

	/// <summary>
	/// Number of up trending ticks.
	/// </summary>
	[DataMember]
	[DisplayNameLoc(LocalizedStrings.TickUpKey)]
	[DescriptionLoc(LocalizedStrings.TickUpCountKey)]
	public int? UpTicks { get; set; }

	/// <summary>
	/// Number of down trending ticks.
	/// </summary>
	[DataMember]
	[DisplayNameLoc(LocalizedStrings.TickDownKey)]
	[DescriptionLoc(LocalizedStrings.TickDownCountKey)]
	public int? DownTicks { get; set; }

	private CandleStates _state;

	/// <summary>
	/// State.
	/// </summary>
	/// <remarks>
	/// Once the state reaches <see cref="CandleStates.Finished"/> the candle
	/// becomes effectively immutable: any further assignment throws.
	/// </remarks>
	[DataMember]
	[DisplayNameLoc(LocalizedStrings.StateKey)]
	[DescriptionLoc(LocalizedStrings.CandleStateKey, true)]
	public CandleStates State
	{
		get => _state;
		set
		{
			// Guard runs against the CURRENT state, so the transition into
			// Finished itself is allowed; only later changes are rejected.
			ThrowIfFinished();
			_state = value;
		}
	}

	/// <summary>
	/// Price levels.
	/// </summary>
	[DataMember]
	[DisplayNameLoc(LocalizedStrings.PriceLevelsKey)]
	public IEnumerable<CandlePriceLevel> PriceLevels { get; set; }

	/// <summary>
	/// <see cref="PriceLevels"/> with minimum <see cref="CandlePriceLevel.TotalVolume"/>.
	/// </summary>
	/// <remarks>Re-enumerates <see cref="PriceLevels"/> on every access; null when no levels are set.</remarks>
	public CandlePriceLevel? MinPriceLevel => PriceLevels?.OrderBy(l => l.TotalVolume).FirstOr();

	/// <summary>
	/// <see cref="PriceLevels"/> with maximum <see cref="CandlePriceLevel.TotalVolume"/>.
	/// </summary>
	/// <remarks>Re-enumerates <see cref="PriceLevels"/> on every access; null when no levels are set.</remarks>
	public CandlePriceLevel? MaxPriceLevel => PriceLevels?.OrderByDescending(l => l.TotalVolume).FirstOr();

	/// <summary>
	/// Open interest.
	/// </summary>
	[DataMember]
	[DisplayNameLoc(LocalizedStrings.OIKey)]
	[DescriptionLoc(LocalizedStrings.OpenInterestKey)]
	public decimal? OpenInterest { get; set; }

	/// <summary>
	/// Sequence number.
	/// </summary>
	/// <remarks>Zero means no information.</remarks>
	[DataMember]
	public long SeqNum { get; set; }

	/// <summary>
	/// Determines the message is generated from the specified <see cref="DataType"/>.
	/// </summary>
	[DataMember]
	public DataType BuildFrom { get; set; }

	/// <inheritdoc />
	public override string ToString()
	{
		return "{0:HH:mm:ss} {1} (O:{2}, H:{3}, L:{4}, C:{5}, V:{6})"
			.Put(OpenTime, GetType().Name + "_" + Security + "_" + Arg, OpenPrice, HighPrice, LowPrice, ClosePrice, TotalVolume);
	}

	// Throws when the candle has already been finalized; used to enforce the
	// "finished candles are immutable" rule in the State setter.
	private void ThrowIfFinished()
	{
		if (State == CandleStates.Finished)
			throw new InvalidOperationException(LocalizedStrings.Str649);
	}

	/// <summary>
	/// Copy the message into the <paramref name="destination" />.
	/// </summary>
	/// <remarks>
	/// Note that <see cref="State"/> is deliberately NOT copied (see the
	/// commented-out lines below): the copy starts with the default state so
	/// the Finished-immutability guard does not fire on it. PriceLevels is
	/// copied via ToArray; NOTE(review): this appears to rely on
	/// CandlePriceLevel being a value type for element copies - confirm.
	/// </remarks>
	/// <typeparam name="TCandle">The candle type.</typeparam>
	/// <param name="destination">The object, to which copied information.</param>
	/// <returns>The object, to which copied information.</returns>
	protected TCandle CopyTo<TCandle>(TCandle destination)
		where TCandle : Candle
	{
		destination.Arg = Arg;
		destination.ClosePrice = ClosePrice;
		destination.CloseTime = CloseTime;
		destination.CloseVolume = CloseVolume;
		destination.DownTicks = DownTicks;
		destination.HighPrice = HighPrice;
		destination.HighTime = HighTime;
		destination.HighVolume = HighVolume;
		destination.LowPrice = LowPrice;
		destination.LowTime = LowTime;
		destination.LowVolume = LowVolume;
		destination.OpenInterest = OpenInterest;
		destination.OpenPrice = OpenPrice;
		destination.OpenTime = OpenTime;
		destination.OpenVolume = OpenVolume;
		destination.RelativeVolume = RelativeVolume;
		destination.Security = Security;
		//destination.Series = Series;
		//destination.Source = Source;
		//destination.State = State;
		destination.TotalPrice = TotalPrice;
		destination.TotalTicks = TotalTicks;
		destination.TotalVolume = TotalVolume;
		//destination.VolumeProfileInfo = VolumeProfileInfo;
		destination.PriceLevels = PriceLevels?./*Select(l => l.Clone()).*/ToArray();
		destination.SeqNum = SeqNum;
		destination.BuildFrom = BuildFrom;

		return destination;
	}
}
/// <summary>
/// Time-frame candle: aggregates trades over a fixed period of time.
/// </summary>
[DataContract]
[Serializable]
[DisplayNameLoc(LocalizedStrings.TimeFrameCandleKey)]
public class TimeFrameCandle : Candle
{
	/// <summary>
	/// Time-frame.
	/// </summary>
	[DataMember]
	public TimeSpan TimeFrame { get; set; }

	/// <inheritdoc />
	public override object Arg
	{
		get { return TimeFrame; }
		set { TimeFrame = (TimeSpan)value; }
	}

	/// <summary>
	/// Create a copy of <see cref="TimeFrameCandle"/>.
	/// </summary>
	/// <returns>Copy.</returns>
	public override Candle Clone() => CopyTo(new TimeFrameCandle());
}
/// <summary>
/// Tick candle: aggregates a fixed number of trades (ticks).
/// </summary>
[DataContract]
[Serializable]
[DisplayNameLoc(LocalizedStrings.TickCandleKey)]
public class TickCandle : Candle
{
	private int _maxTradeCount;

	/// <summary>
	/// Maximum tick count.
	/// </summary>
	[DataMember]
	public int MaxTradeCount
	{
		get => _maxTradeCount;
		// A negative count is meaningless, so reject it up front.
		set => _maxTradeCount = value >= 0
			? value
			: throw new ArgumentOutOfRangeException(nameof(value));
	}

	/// <inheritdoc />
	public override object Arg
	{
		get { return MaxTradeCount; }
		set { MaxTradeCount = (int)value; }
	}

	/// <summary>
	/// Create a copy of <see cref="TickCandle"/>.
	/// </summary>
	/// <returns>Copy.</returns>
	public override Candle Clone() => CopyTo(new TickCandle());
}
/// <summary>
/// Volume candle: aggregates trades until a target volume is reached.
/// </summary>
[DataContract]
[Serializable]
[DisplayNameLoc(LocalizedStrings.VolumeCandleKey)]
public class VolumeCandle : Candle
{
	private decimal _volume;

	/// <summary>
	/// Maximum volume.
	/// </summary>
	[DataMember]
	public decimal Volume
	{
		get => _volume;
		// A negative volume is meaningless, so reject it up front.
		set => _volume = value >= 0
			? value
			: throw new ArgumentOutOfRangeException(nameof(value));
	}

	/// <inheritdoc />
	public override object Arg
	{
		get { return Volume; }
		set { Volume = (decimal)value; }
	}

	/// <summary>
	/// Create a copy of <see cref="VolumeCandle"/>.
	/// </summary>
	/// <returns>Copy.</returns>
	public override Candle Clone() => CopyTo(new VolumeCandle());
}
/// <summary>
/// Range candle: closes once price has moved across a fixed price range.
/// </summary>
[DataContract]
[Serializable]
[DisplayNameLoc(LocalizedStrings.RangeCandleKey)]
public class RangeCandle : Candle
{
	private Unit _priceRange;

	/// <summary>
	/// Range of price.
	/// </summary>
	[DataMember]
	public Unit PriceRange
	{
		get => _priceRange;
		set
		{
			if (value is null)
				throw new ArgumentNullException(nameof(value));

			_priceRange = value;
		}
	}

	/// <inheritdoc />
	public override object Arg
	{
		get { return PriceRange; }
		set { PriceRange = (Unit)value; }
	}

	/// <summary>
	/// Create a copy of <see cref="RangeCandle"/>.
	/// </summary>
	/// <returns>Copy.</returns>
	public override Candle Clone() => CopyTo(new RangeCandle());
}
/// <summary>
/// The candle of point-and-figure chart (tac-toe chart).
/// </summary>
[DataContract]
[Serializable]
[DisplayNameLoc(LocalizedStrings.PnFCandleKey)]
public class PnFCandle : Candle
{
	private PnFArg _pnFArg;

	/// <summary>
	/// Value of arguments.
	/// </summary>
	[DataMember]
	public PnFArg PnFArg
	{
		get => _pnFArg;
		set
		{
			if (value is null)
				throw new ArgumentNullException(nameof(value));

			_pnFArg = value;
		}
	}

	///// <summary>
	///// Type of symbols.
	///// </summary>
	//[DataMember]
	//public PnFTypes Type { get; set; }

	/// <inheritdoc />
	public override object Arg
	{
		get { return PnFArg; }
		set { PnFArg = (PnFArg)value; }
	}

	/// <summary>
	/// Create a copy of <see cref="PnFCandle"/>.
	/// </summary>
	/// <returns>Copy.</returns>
	public override Candle Clone() => CopyTo(new PnFCandle());
}
/// <summary>
/// Renko candle: closes after price moves by a fixed box size.
/// </summary>
[DataContract]
[Serializable]
[DisplayNameLoc(LocalizedStrings.RenkoCandleKey)]
public class RenkoCandle : Candle
{
	private Unit _boxSize;

	/// <summary>
	/// Possible price change range.
	/// </summary>
	[DataMember]
	public Unit BoxSize
	{
		get => _boxSize;
		set
		{
			if (value is null)
				throw new ArgumentNullException(nameof(value));

			_boxSize = value;
		}
	}

	/// <inheritdoc />
	public override object Arg
	{
		get { return BoxSize; }
		set { BoxSize = (Unit)value; }
	}

	/// <summary>
	/// Create a copy of <see cref="RenkoCandle"/>.
	/// </summary>
	/// <returns>Copy.</returns>
	public override Candle Clone() => CopyTo(new RenkoCandle());
}
/// <summary>
/// Heikin ashi candle. A time-frame candle whose values are smoothed
/// per the Heikin-Ashi charting technique.
/// </summary>
[DataContract]
[Serializable]
[DisplayNameLoc(LocalizedStrings.HeikinAshiKey)]
public class HeikinAshiCandle : TimeFrameCandle
{
	/// <summary>
	/// Create a copy of <see cref="HeikinAshiCandle"/>.
	/// </summary>
	/// <returns>Copy.</returns>
	public override Candle Clone() => CopyTo(new HeikinAshiCandle());
}
}
| StockSharp/StockSharp | Algo/Candles/Candle.cs | C# | apache-2.0 | 13,470 |
# -*- coding: utf-8 -*-
#
# Copyright (c) 2013 Clione Software
# Copyright (c) 2010-2013 Cidadania S. Coop. Galega
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
from django.core.validators import RegexValidator
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import User
from django.contrib.sites.models import Site
from core.spaces.file_validation import ContentTypeRestrictedFileField
from fields import StdImageField
from allowed_types import ALLOWED_CONTENT_TYPES
class Space(models.Model):

    """
    Spaces model. This model stores a "space" or "place" also known as a
    participative process in reality. Every place has a minimum set of
    settings for customization.

    There are three main permission roles in every space: administrator
    (admins), moderators (mods) and regular users (users). The role
    permissions are declared in ``Meta.permissions`` below.
    """
    # Display name, unique across the whole site.
    name = models.CharField(_('Name'), max_length=250, unique=True,
        help_text=_('Max: 250 characters'))
    # URL slug: lowercase/digits/underscore only, enforced by the validator.
    url = models.CharField(_('URL'), max_length=100, unique=True,
        validators=[RegexValidator(regex='^[a-z0-9_]+$',
        message='Invalid characters in the space URL.')],
        help_text=_('Valid characters are lowercase, digits and \
        underscore. This will be the accesible URL'))
    description = models.TextField(_('Description'),
        default=_('Write here your description.'))
    # Set once at creation time (auto_now_add), never updated afterwards.
    pub_date = models.DateTimeField(_('Date of creation'), auto_now_add=True)
    author = models.ForeignKey(User, blank=True, null=True,
        verbose_name=_('Space creator'), help_text=_('Select a user that \
        will be marked as creator of the space'))
    # StdImageField resizes uploads to the given (width, height) size.
    logo = StdImageField(upload_to='spaces/logos', size=(100, 75, False),
        help_text = _('Valid extensions are jpg, jpeg, png and gif'))
    banner = StdImageField(upload_to='spaces/banners', size=(500, 75, False),
        help_text = _('Valid extensions are jpg, jpeg, png and gif'))
    # Public spaces are visible to everyone; participation still requires
    # registration.
    public = models.BooleanField(_('Public space'), help_text=_("This will \
        make the space visible to everyone, but registration will be \
        necessary to participate."))

    # Feature toggles: each flag enables one optional module for this space.
    mod_debate = models.BooleanField(_('Debate'))
    mod_proposals = models.BooleanField(_('Proposals'))
    mod_news = models.BooleanField(_('News'))
    mod_cal = models.BooleanField(_('Calendar'))
    mod_docs = models.BooleanField(_('Documents'))
    mod_voting = models.BooleanField(_('Voting'))

    class Meta:
        ordering = ['name']
        verbose_name = _('Space')
        verbose_name_plural = _('Spaces')
        get_latest_by = 'pub_date'
        # Custom object-level permissions checked by the space views.
        permissions = (
            ('view_space', 'Can view this space.'),
            ('admin_space', 'Can administrate this space.'),
            ('mod_space', 'Can moderate this space.')
        )

    def __unicode__(self):
        return self.name

    @models.permalink
    def get_absolute_url(self):
        return ('space-index', (), {
            'space_url': self.url})
class Entity(models.Model):

    """
    This model stores the name of the entities responsible for the creation
    of the space or supporting it.
    """
    # Entity name, unique site-wide.
    name = models.CharField(_('Name'), max_length=100, unique=True)
    website = models.CharField(_('Website'), max_length=100, null=True,
        blank=True)
    logo = models.ImageField(upload_to='spaces/logos', verbose_name=_('Logo'),
        blank=True, null=True)
    # Space this entity supports; optional, so an entity may exist unattached.
    space = models.ForeignKey(Space, blank=True, null=True)

    class Meta:
        ordering = ['name']
        verbose_name = _('Entity')
        verbose_name_plural = _('Entities')

    def __unicode__(self):
        return self.name
class Document(models.Model):

    """
    This models stores documents for the space, like a document repository,
    There is no restriction in what a user can upload to the space.

    :methods: get_file_ext, get_file_size
    """
    title = models.CharField(_('Document title'), max_length=100,
        help_text=_('Max: 100 characters'))
    space = models.ForeignKey(Space, blank=True, null=True,
        help_text=_('Change the space to whom belongs this document'))
    # Uploads restricted by MIME type and capped at 25 MiB (26214400 bytes).
    docfile = ContentTypeRestrictedFileField(_('File'),
        upload_to='spaces/documents/%Y/%m/%d',
        content_types=ALLOWED_CONTENT_TYPES,
        max_upload_size=26214400,
        help_text=_('Permitted file types: DOC, DOCX, PPT, ODT, ODF, ODP, \
        PDF, RST, TXT.'))
    pub_date = models.DateTimeField(auto_now_add=True)
    author = models.ForeignKey(User, verbose_name=_('Author'), blank=True,
        null=True, help_text=_('Change the user that will figure as the \
        author'))

    def get_file_ext(self):
        """
        Return the uppercased extension of the uploaded file.

        Fix: the previous ``split('.')[1]`` returned the wrong segment for
        names with several dots (e.g. ``a.tar.gz`` -> ``TAR``) and raised
        IndexError for names without a dot. ``rsplit`` with ``[-1]`` always
        yields the last segment and degrades gracefully to the whole name
        when no dot is present.
        """
        return self.docfile.name.rsplit('.', 1)[-1].upper()

    def get_file_size(self):
        """
        Return the size of the uploaded file as a human-readable string
        in Bytes, KB or MB.

        Fixes: the previous boundary checks left a gap (a file of exactly
        1023 bytes matched no branch and returned None), and the MB branch
        divided by 1024000.0 instead of 1048576.0 (1024 * 1024).
        """
        size = self.docfile.size
        if size < 1024:
            return str(size) + " Bytes"
        elif size < 1048576:
            return str(round(size / 1024.0, 2)) + " KB"
        else:
            return str(round(size / 1048576.0, 2)) + " MB"

    class Meta:
        ordering = ['pub_date']
        verbose_name = _('Document')
        verbose_name_plural = _('Documents')
        get_latest_by = 'pub_date'

    # There is no 'view-document' view, so I'll leave the get_absolute_url
    # method without permalink. Remember that the document files are accesed
    # through the url() method in templates.
    def get_absolute_url(self):
        return '/spaces/%s/docs/%s' % (self.space.url, self.id)
class Event(models.Model):

    """
    Meeting data model. Every space (process) has N meetings. This will
    keep record of the assistants, meeting name, etc.
    """
    title = models.CharField(_('Event name'), max_length=250,
        help_text="Max: 250 characters")
    space = models.ForeignKey(Space, blank=True, null=True)
    user = models.ManyToManyField(User, verbose_name=_('Users'),
        help_text=_('List of the users that will assist or assisted to the \
event.'))
    pub_date = models.DateTimeField(auto_now_add=True)
    event_author = models.ForeignKey(User, verbose_name=_('Created by'),
        blank=True, null=True, related_name='meeting_author',
        help_text=_('Select the user that will be designated as author.'))
    event_date = models.DateTimeField(verbose_name=_('Event date'),
        help_text=_('Select the date where the event is celebrated.'))
    description = models.TextField(_('Description'), blank=True, null=True)
    location = models.TextField(_('Location'), blank=True, null=True)
    latitude = models.DecimalField(_('Latitude'), blank=True, null=True,
        max_digits=17, decimal_places=15, help_text=_('Specify it in decimal'))
    longitude = models.DecimalField(_('Longitude'), blank=True, null=True,
        max_digits=17, decimal_places=15, help_text=_('Specify it in decimal'))

    def is_due(self):
        """Return True when the event date is already in the past.

        NOTE(review): compares against naive ``datetime.now()``; if the
        project enables timezone support this comparison may misbehave —
        confirm against settings.
        """
        # Idiom fix: return the comparison directly instead of the
        # if/return True/else/return False pattern.
        return self.event_date < datetime.now()

    class Meta:
        ordering = ['event_date']
        verbose_name = _('Event')
        verbose_name_plural = _('Events')
        get_latest_by = 'event_date'
        permissions = (
            ('view_event', 'Can view this event'),
            ('admin_event', 'Can administrate this event'),
            ('mod_event', 'Can moderate this event'),
        )

    def __unicode__(self):
        return self.title

    @models.permalink
    def get_absolute_url(self):
        return ('view-event', (), {
            'space_url': self.space.url,
            'event_id': str(self.id)})
class Intent(models.Model):

    """
    Intent data model. Intent stores the reference of a user-token when a user
    asks entering in a restricted space.

    .. versionadded: 0.1.5
    """
    # Requesting user and the restricted space they want to join.
    user = models.ForeignKey(User)
    space = models.ForeignKey(Space)
    # Opaque 32-character token identifying this request.
    token = models.CharField(max_length=32)
    requested_on = models.DateTimeField(auto_now_add=True)

    def get_approve_url(self):
        # Build the absolute approval link for space administrators.
        # NOTE(review): assumes at least one Site row exists;
        # Site.objects.all()[0] raises IndexError on an empty table — confirm.
        site = Site.objects.all()[0]
        return "http://%s%sintent/approve/%s" % (site.domain, self.space.get_absolute_url(), self.token)
| cidadania/e-cidadania | src/core/spaces/models.py | Python | apache-2.0 | 8,796 |
# Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.urls import reverse
from django.utils.translation import gettext_lazy as _
from django.utils.translation import ngettext_lazy
from horizon import tables
from openstack_dashboard import api
from openstack_dashboard import policy
class AddProtocol(policy.PolicyTargetMixin, tables.LinkAction):
    """Table action opening the modal to add a federation protocol
    to an identity provider."""
    name = "create"
    verbose_name = _("Add Protocol")
    url = "horizon:identity:identity_providers:protocols:create"
    classes = ("ajax-modal",)
    icon = "plus"
    policy_rules = (("identity", "identity:create_protocol"),)

    def get_link_url(self, datum=None):
        # The create view is nested under an identity provider; its id
        # comes from the table kwargs, not from the row datum.
        idp_id = self.table.kwargs['identity_provider_id']
        return reverse(self.url, args=(idp_id,))
class RemoveProtocol(policy.PolicyTargetMixin, tables.DeleteAction):
    """Delete action removing one or more protocols from an identity
    provider via the Keystone API."""

    @staticmethod
    def action_present(count):
        return ngettext_lazy(
            "Delete Protocol",
            "Delete Protocols",
            count
        )

    @staticmethod
    def action_past(count):
        return ngettext_lazy(
            "Deleted Protocol",
            "Deleted Protocols",
            count
        )

    policy_rules = (("identity", "identity:delete_protocol"),)

    def delete(self, request, obj_id):
        # The row id is the protocol id; the owning identity provider id
        # is carried in the table kwargs.
        identity_provider = self.table.kwargs['identity_provider_id']
        protocol = obj_id
        api.keystone.protocol_delete(request, identity_provider, protocol)
class ProtocolsTable(tables.DataTable):
    """Table listing the federation protocols of one identity provider."""
    protocol = tables.Column("id",
                             verbose_name=_("Protocol ID"))
    mapping = tables.Column("mapping_id",
                            verbose_name=_("Mapping ID"))

    def get_object_display(self, datum):
        # Shown in confirmation dialogs and log messages.
        return datum.id

    class Meta(object):
        name = "idp_protocols"
        verbose_name = _("Protocols")
        table_actions = (AddProtocol, RemoveProtocol)
        row_actions = (RemoveProtocol, )
| openstack/horizon | openstack_dashboard/dashboards/identity/identity_providers/protocols/tables.py | Python | apache-2.0 | 2,469 |
package com.ua.hower.house;
/**
* Created by poliveira on 27/10/2014.
*/
public interface NavigationDrawerCallbacks {

    /**
     * Invoked when the user selects an item in the navigation drawer.
     *
     * @param position zero-based index of the selected drawer entry
     */
    void onNavigationDrawerItemSelected(int position);
}
| PrudhviRaju123/Tour | app/src/main/java/com/ua/hower/house/NavigationDrawerCallbacks.java | Java | apache-2.0 | 178 |
/*
* Copyright 2015-2020 OpenCB
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.opencb.opencga.core.models.study;
import java.util.ArrayList;
import java.util.List;
/**
 * REST create-parameters DTO for a study {@code VariableSet}: a named,
 * optionally unique/confidential collection of annotation variables
 * attached to one or more annotable data models.
 */
public class VariableSetCreateParams {

    private String id;
    private String name;
    // Whether the set may be annotated at most once per entry.
    private Boolean unique;
    // Whether annotations of this set require confidential permissions.
    private Boolean confidential;
    private String description;
    // Data-model kinds this variable set can annotate.
    private List<VariableSet.AnnotableDataModels> entities;
    private List<Variable> variables;

    public VariableSetCreateParams() {
    }

    public VariableSetCreateParams(String id, String name, Boolean unique, Boolean confidential, String description,
                                   List<VariableSet.AnnotableDataModels> entities, List<Variable> variables) {
        this.id = id;
        this.name = name;
        this.unique = unique;
        this.confidential = confidential;
        this.description = description;
        this.entities = entities;
        this.variables = variables;
    }

    /**
     * Build create params from an existing {@link VariableSet}. The
     * variables are copied into a fresh list so later mutation of the
     * params does not touch the source set's collection.
     */
    public static VariableSetCreateParams of(VariableSet variableSet) {
        return new VariableSetCreateParams(variableSet.getId(), variableSet.getName(), variableSet.isUnique(), variableSet.isConfidential(),
                variableSet.getDescription(), variableSet.getEntities(), new ArrayList<>(variableSet.getVariables()));
    }

    @Override
    public String toString() {
        final StringBuilder sb = new StringBuilder("VariableSetCreateParams{");
        sb.append("id='").append(id).append('\'');
        sb.append(", name='").append(name).append('\'');
        sb.append(", unique=").append(unique);
        sb.append(", confidential=").append(confidential);
        sb.append(", description='").append(description).append('\'');
        sb.append(", entities=").append(entities);
        sb.append(", variables=").append(variables);
        sb.append('}');
        return sb.toString();
    }

    public Boolean getUnique() {
        return unique;
    }

    public VariableSetCreateParams setUnique(Boolean unique) {
        this.unique = unique;
        return this;
    }

    public Boolean getConfidential() {
        return confidential;
    }

    public VariableSetCreateParams setConfidential(Boolean confidential) {
        this.confidential = confidential;
        return this;
    }

    public String getId() {
        return id;
    }

    public VariableSetCreateParams setId(String id) {
        this.id = id;
        return this;
    }

    public String getName() {
        return name;
    }

    public VariableSetCreateParams setName(String name) {
        this.name = name;
        return this;
    }

    public String getDescription() {
        return description;
    }

    public VariableSetCreateParams setDescription(String description) {
        this.description = description;
        return this;
    }

    public List<VariableSet.AnnotableDataModels> getEntities() {
        return entities;
    }

    public VariableSetCreateParams setEntities(List<VariableSet.AnnotableDataModels> entities) {
        this.entities = entities;
        return this;
    }

    public List<Variable> getVariables() {
        return variables;
    }

    public VariableSetCreateParams setVariables(List<Variable> variables) {
        this.variables = variables;
        return this;
    }
}
| j-coll/opencga | opencga-core/src/main/java/org/opencb/opencga/core/models/study/VariableSetCreateParams.java | Java | apache-2.0 | 3,818 |
# Copyright 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""nsx_gw_devices
Revision ID: 19180cf98af6
Revises: 117643811bca
Create Date: 2014-02-26 02:46:26.151741
"""
# revision identifiers, used by Alembic.
revision = '19180cf98af6'
down_revision = '117643811bca'

# Change to ['*'] if this migration applies to all plugins.
# The migration is skipped unless one of these NSX/Nicira plugins is active.
migration_for_plugins = [
    'neutron.plugins.nicira.NeutronPlugin.NvpPluginV2',
    'neutron.plugins.nicira.NeutronServicePlugin.NvpAdvancedPlugin',
    'neutron.plugins.vmware.plugin.NsxPlugin',
    'neutron.plugins.vmware.plugin.NsxServicePlugin'
]
from alembic import op
import sqlalchemy as sa
from neutron.db import migration
def upgrade(active_plugins=None, options=None):
    """Split gateway devices into a reference table plus a new device
    table keyed by the NSX backend identifier."""
    if not migration.should_run(active_plugins, migration_for_plugins):
        return

    op.create_table(
        'networkgatewaydevicereferences',
        sa.Column('id', sa.String(length=36), nullable=False),
        sa.Column('network_gateway_id', sa.String(length=36), nullable=True),
        sa.Column('interface_name', sa.String(length=64), nullable=True),
        sa.ForeignKeyConstraint(['network_gateway_id'], ['networkgateways.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id', 'network_gateway_id', 'interface_name'),
        mysql_engine='InnoDB')
    # Copy data from networkgatewaydevices into networkgatewaydevicereferences
    op.execute("INSERT INTO networkgatewaydevicereferences SELECT "
               "id, network_gateway_id, interface_name FROM "
               "networkgatewaydevices")
    # drop networkgatewaydevices and re-create it with the new schema
    op.drop_table('networkgatewaydevices')
    op.create_table(
        'networkgatewaydevices',
        sa.Column('tenant_id', sa.String(length=255), nullable=True),
        sa.Column('id', sa.String(length=36), nullable=False),
        sa.Column('nsx_id', sa.String(length=36), nullable=True),
        sa.Column('name', sa.String(length=255), nullable=True),
        sa.Column('connector_type', sa.String(length=10), nullable=True),
        sa.Column('connector_ip', sa.String(length=64), nullable=True),
        sa.Column('status', sa.String(length=16), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        mysql_engine='InnoDB')
    # Create a networkgatewaydevice for each existing reference.
    # For existing references nsx_id == neutron_id.
    # Do not fill connector info as it would be unknown.
    op.execute("INSERT INTO networkgatewaydevices (id, nsx_id) SELECT "
               "id, id as nsx_id FROM networkgatewaydevicereferences")
def downgrade(active_plugins=None, options=None):
    """Restore the single pre-split networkgatewaydevices table."""
    if not migration.should_run(active_plugins, migration_for_plugins):
        return

    op.drop_table('networkgatewaydevices')
    # Re-create previous version of networkgatewaydevices table
    op.create_table(
        'networkgatewaydevices',
        sa.Column('id', sa.String(length=36), nullable=False),
        sa.Column('network_gateway_id', sa.String(length=36), nullable=True),
        sa.Column('interface_name', sa.String(length=64), nullable=True),
        sa.ForeignKeyConstraint(['network_gateway_id'], ['networkgateways.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'),
        mysql_engine='InnoDB')
    # Copy from networkgatewaydevicereferences to networkgatewaydevices
    op.execute("INSERT INTO networkgatewaydevices SELECT "
               "id, network_gateway_id, interface_name FROM "
               "networkgatewaydevicereferences")
    # Drop networkgatewaydevicereferences
    op.drop_table('networkgatewaydevicereferences')
| zhhf/charging | charging/db/migration/alembic_migrations/versions/19180cf98af6_nsx_gw_devices.py | Python | apache-2.0 | 4,178 |
package es.ucm.fdi.iw.controller;
import java.security.Principal;
import java.util.ArrayList;
import java.util.List;
import javax.persistence.EntityManager;
import javax.servlet.http.HttpSession;
import org.apache.log4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.web.servletapi.SecurityContextHolderAwareRequestWrapper;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import com.google.gson.Gson;
import es.ucm.fdi.iw.model.Intercambio;
import es.ucm.fdi.iw.model.Usuario;
import es.ucm.fdi.iw.model.UsuarioJSON;
@Controller
@RequestMapping("home")
public class HomeController {

    private static Logger log = Logger.getLogger(HomeController.class);

    @Autowired
    private EntityManager entityManager;

    // Incluimos ${prefix} en todas las páginas: static-resource prefixes
    // shared by every view rendered from this controller.
    @ModelAttribute
    public void addAttributes(Model m) {
        m.addAttribute("prefix", "../static/");
        m.addAttribute("prefijo", "../");
    }

    /**
     * Landing page: stores the logged-in user in session, exposes the
     * active users (as entities and as JSON) and the pending-exchange
     * counter. Admins are redirected to their own dashboard.
     */
    @GetMapping({ "", "/" })
    public String root(Model model, Principal principal, HttpSession session,
            SecurityContextHolderAwareRequestWrapper request) {
        añadirCSSyJSAlModelo(model);

        // Bug fix: the original called principal.getName() BEFORE its
        // null check, so an unauthenticated request hit a
        // NullPointerException instead of being redirected.
        if (principal == null) {
            return "redirect:index";
        }

        Usuario usuarioActual = (Usuario) entityManager.createNamedQuery("userByUserField")
                .setParameter("userParam", principal.getName()).getSingleResult();

        if (session.getAttribute("user") == null) {
            try {
                if (!usuarioActual.isActivo()) {
                    throw new Exception();
                }
                session.setAttribute("user", usuarioActual);
            } catch (Exception e) {
                log.info("No such user: " + principal.getName());
                return "redirect:index";
            }
        }

        @SuppressWarnings("unchecked")
        ArrayList<Usuario> usuarios = (ArrayList<Usuario>) entityManager.createNamedQuery("getActiveUsers")
                .setParameter("roleParam", "USER").setParameter("activeParam", true)
                .setParameter("actual", principal.getName()).getResultList();

        // Build {"usuarios":[...]} with a StringBuilder and an index
        // counter; the original concatenated Strings and called
        // List.indexOf() inside the loop, making it O(n^2).
        Gson gson = new Gson();
        StringBuilder json = new StringBuilder("{\"usuarios\":[");
        for (int i = 0; i < usuarios.size(); i++) {
            if (i > 0) {
                json.append(',');
            }
            json.append(gson.toJson(new UsuarioJSON(usuarios.get(i))));
        }
        json.append("]}");
        model.addAttribute("usuariosJSON", json.toString());
        model.addAttribute("usuarios", usuarios);

        if (request.isUserInRole("ROLE_ADMIN"))
            return "redirect:admin";

        // Enviamos al modelo el usuarioActual (en JSON y normal)
        añadirUsuarioActualJSON(model, usuarioActual);
        model.addAttribute("usuarioActual", usuarioActual);
        mensajesPendientes(model, usuarioActual);
        return "home";
    }

    /** Serializes the current user and exposes it as "usuarioActualJSON". */
    private void añadirUsuarioActualJSON(Model model, Usuario usuarioActual) {
        UsuarioJSON usuarioActualJSON = new UsuarioJSON(usuarioActual);
        Gson gson = new Gson();
        String jsonAux = gson.toJson(usuarioActualJSON);
        model.addAttribute("usuarioActualJSON", jsonAux);
    }

    /** Counts the user's pending exchanges into "numeroDeMensajes". */
    @SuppressWarnings("unchecked")
    private void mensajesPendientes(Model model, Usuario usuarioActual) {
        List<Intercambio> intercambios = entityManager.createNamedQuery("allIntercambiosUsuarioPendiente")
                .setParameter("estado", "Pendiente")
                .setParameter("user", usuarioActual)
                .getResultList();
        model.addAttribute("numeroDeMensajes", intercambios.size());
    }

    /** Registers the extra CSS/JS assets every home view needs. */
    public static void añadirCSSyJSAlModelo(Model model) {
        List<String> listaCSS = new ArrayList<String>();
        listaCSS.add("styleHome.css");
        listaCSS.add("popup.css");
        listaCSS.add("star-rating.min.css");

        List<String> listaJS = new ArrayList<String>();
        listaJS.add("jquery-3.1.1.min.js");
        listaJS.add("jquery-ui-1.12.1/jquery-ui.min.js");
        listaJS.add("bootstrap.min.js");
        listaJS.add("star-rating.min.js");
        listaJS.add("home.js");

        model.addAttribute("pageExtraCSS", listaCSS);
        model.addAttribute("pageExtraScripts", listaJS);
    }
}
| lorenpaz/CardEx | src/main/java/es/ucm/fdi/iw/controller/HomeController.java | Java | apache-2.0 | 4,005 |
package org.apache.hadoop.mapred.spatial;
import java.io.IOException;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.RecordWriter;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.spatial.CellInfo;
import org.apache.hadoop.spatial.Shape;
/**
 * Old-API (org.apache.hadoop.mapred) adapter over the core
 * {@link org.apache.hadoop.spatial.RTreeGridRecordWriter}: unwraps the
 * Writable key before delegating to the generic writer.
 */
public class RTreeGridRecordWriter
    extends org.apache.hadoop.spatial.RTreeGridRecordWriter<Shape>
    implements RecordWriter<IntWritable, Text> {

  public RTreeGridRecordWriter(FileSystem fileSystem, Path outFile, CellInfo[] cells, boolean overwrite) throws IOException {
    super(fileSystem, outFile, cells, overwrite);
  }

  @Override
  public void write(IntWritable key, Text value) throws IOException {
    // Unbox the cell id; the text value is passed through unchanged.
    super.write(key.get(), value);
  }

  @Override
  public void close(Reporter reporter) throws IOException {
    super.close(reporter);
  }
}
| aseldawy/spatialhadoop | src/mapred/org/apache/hadoop/mapred/spatial/RTreeGridRecordWriter.java | Java | apache-2.0 | 955 |
/*******************************************************************************
* Copyright 2019 Tremolo Security, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package com.tremolosecurity.proxy;
/**
*
* @author mlb
* Provides an interface to extend a session termination from an external source instead of the built in session variables
*/
public interface ExternalSessionExpires {

	/**
	 * Returns the session expiration instant supplied by the external
	 * source.
	 *
	 * @return The expiration date/time in standard java form
	 *         (milliseconds since epoch)
	 */
	public long getExpires();
}
| TremoloSecurity/OpenUnison | unison/unison-sdk/src/main/java/com/tremolosecurity/proxy/ExternalSessionExpires.java | Java | apache-2.0 | 1,124 |
from datetime import datetime
import random
import string
from bson import ObjectId
class DuplicateUserException(Exception):
    """Raised when creating/updating a user whose name or email is taken.

    Fixes: the default message misspelled "exists" as "exits", and the
    original carried a dead trailing ``pass`` statement.
    """

    def __init__(self, message='User name/email already exists'):
        Exception.__init__(self, message)
class UserServiceException(Exception):
    """Generic service-level error raised by user operations."""

    def __init__(self, message=None):
        Exception.__init__(self, message)

    @classmethod
    def cannot_delete_super_admin(cls):
        # Factory for the one well-known error condition.
        return cls("Cannot delete super admin user!")
class UserService(object):
    """Data-access service for user documents stored in MongoDB.

    ``db.user_collection`` is the backing pymongo collection.
    """

    def __init__(self, db):
        self.db = db
        self.users = self.db.user_collection

    def generate_api_key(self):
        """Return a random 20-character alphanumeric API key.

        Security fix: API keys are security tokens, so this draws from
        the OS entropy pool via ``random.SystemRandom``. The original
        used the default (non-cryptographic) PRNG, and ``random.sample``
        never repeats a character, which shrinks the keyspace.
        """
        alphabet = string.ascii_letters + string.digits
        rng = random.SystemRandom()
        return ''.join(rng.choice(alphabet) for _ in range(20))

    def create(self, item):
        """Insert a new user dict, filling defaults; returns the new id.

        Raises DuplicateUserException when the email is already taken.
        """
        if self.user_exists(item['email']):
            raise DuplicateUserException()
        item.pop('_id', None)
        item['created_at'] = datetime.now()
        item['status'] = True
        if 'api_key' not in item:
            item['api_key'] = self.generate_api_key()
        if 'roles' not in item or item['roles'] is None or len(item['roles']) == 0:
            item['roles'] = ['member']
        return self.users.insert(item)

    def get_by_email(self, email):
        return self.users.find_one({"email": email})

    def validate_user(self, username, password):
        # NOTE(review): compares the password verbatim, which implies
        # passwords are stored in plain text — consider hashing at the
        # storage layer (e.g. bcrypt). Confirm with the auth flow.
        query = {'email': username, 'password': password}
        return self.users.find(query).count() > 0

    def search(self, email=None):
        """Return all users, optionally filtered by exact email."""
        query = {}
        if email is not None:
            query['email'] = email
        return list(self.users.find(query))

    def delete(self, id):
        """Remove a user by id; super admins cannot be deleted."""
        item = self.get_by_id(id)
        if item and 'roles' in item and item['roles'] is not None and 'super_admin' in item['roles']:
            raise UserServiceException.cannot_delete_super_admin()
        return self.users.remove({"_id": ObjectId(id)})

    def get_by_id(self, id):
        return self.users.find_one({"_id": ObjectId(id)})

    def get_by_api_key(self, api_key):
        return self.users.find_one({"api_key": api_key})

    def update(self, item):
        """Save an existing user; no-op when it carries no ``_id``."""
        if item['_id'] is None:
            return item
        if self.user_exists(item['email'], str(item['_id'])):
            raise DuplicateUserException()
        item['updated_at'] = datetime.now()
        self.users.save(item)
        return item

    def user_exists(self, email, id=None):
        """True when another user (excluding ``id``) already uses ``email``."""
        query = {}
        if id is not None:
            query = {"_id": {"$ne": ObjectId(id)}}
        query['email'] = email
        return self.users.find(query).count() > 0
| cackharot/geosnap-server | src/geosnap/service/UserService.py | Python | apache-2.0 | 2,611 |
package org.apache.lucene.index;
/**
* Copyright 2004 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.util.*;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BitVector;
import org.apache.lucene.search.DefaultSimilarity;
/**
* @version $Id: SegmentReader.java 329523 2005-10-30 05:37:11Z yonik $
*/
class SegmentReader extends IndexReader {
  // Name of the segment served by this reader; prefix of all its files.
  private String segment;

  FieldInfos fieldInfos;
  private FieldsReader fieldsReader;
  TermInfosReader tis;

  // Shared term-vector reader prototype; per-thread state lives in
  // termVectorsLocal so concurrent readers do not share stream positions.
  TermVectorsReader termVectorsReaderOrig = null;
  ThreadLocal termVectorsLocal = new ThreadLocal();

  // In-memory deletions bitmap (from <segment>.del); null when none.
  BitVector deletedDocs = null;
  private boolean deletedDocsDirty = false;
  private boolean normsDirty = false;
  private boolean undeleteAll = false;

  IndexInput freqStream;
  IndexInput proxStream;

  // Compound File Reader when based on a compound file segment
  CompoundFileReader cfsReader = null;
  /**
   * Per-field norms: a lazily-read byte array plus the machinery to
   * write modified norms back to disk on commit.
   */
  private class Norm {
    public Norm(IndexInput in, int number)
    {
      this.in = in;
      this.number = number;
    }

    private IndexInput in;     // stream positioned over this field's norms
    private byte[] bytes;      // cached norm bytes; null until first read
    private boolean dirty;     // true when bytes differ from what is on disk
    private int number;        // field number; selects the file suffix

    /** Write the cached bytes through a .tmp file, then rename into place. */
    private void reWrite() throws IOException {
      // NOTE: norms are re-written in regular directory, not cfs
      IndexOutput out = directory().createOutput(segment + ".tmp");
      try {
        out.writeBytes(bytes, maxDoc());
      } finally {
        out.close();
      }
      String fileName;
      if(cfsReader == null)
        fileName = segment + ".f" + number;
      else{
        // use a different file name if we have compound format
        fileName = segment + ".s" + number;
      }
      directory().renameFile(segment + ".tmp", fileName);
      this.dirty = false;
    }
  }
  // Lazily-populated map of field name -> Norm.
  private Hashtable norms = new Hashtable();

  /** The class which implements SegmentReader. */
  private static Class IMPL;
  static {
    try {
      // Allow substituting a SegmentReader subclass via a system property.
      String name =
        System.getProperty("org.apache.lucene.SegmentReader.class",
                           SegmentReader.class.getName());
      IMPL = Class.forName(name);
    } catch (ClassNotFoundException e) {
      throw new RuntimeException("cannot load SegmentReader class: " + e);
    } catch (SecurityException se) {
      // Reading system properties may be forbidden (e.g. in a sandbox);
      // fall back to the default implementation.
      try {
        IMPL = Class.forName(SegmentReader.class.getName());
      } catch (ClassNotFoundException e) {
        throw new RuntimeException("cannot load default SegmentReader class: " + e);
      }
    }
  }

  protected SegmentReader() { super(null); }

  /** Open a reader over one segment without owning directory or infos. */
  public static SegmentReader get(SegmentInfo si) throws IOException {
    return get(si.dir, si, null, false, false);
  }

  public static SegmentReader get(SegmentInfos sis, SegmentInfo si,
                                  boolean closeDir) throws IOException {
    return get(si.dir, si, sis, closeDir, true);
  }

  /** Main factory: instantiate IMPL reflectively, then initialize it. */
  public static SegmentReader get(Directory dir, SegmentInfo si,
                                  SegmentInfos sis,
                                  boolean closeDir, boolean ownDir)
    throws IOException {
    SegmentReader instance;
    try {
      instance = (SegmentReader)IMPL.newInstance();
    } catch (Exception e) {
      throw new RuntimeException("cannot load SegmentReader class: " + e);
    }
    instance.init(dir, sis, closeDir, ownDir);
    instance.initialize(si);
    return instance;
  }
  /** Open all per-segment files; called once right after construction. */
  private void initialize(SegmentInfo si) throws IOException {
    segment = si.name;

    // Use compound file directory for some files, if it exists
    Directory cfsDir = directory();
    if (directory().fileExists(segment + ".cfs")) {
      cfsReader = new CompoundFileReader(directory(), segment + ".cfs");
      cfsDir = cfsReader;
    }

    // No compound file exists - use the multi-file format
    fieldInfos = new FieldInfos(cfsDir, segment + ".fnm");
    fieldsReader = new FieldsReader(cfsDir, segment, fieldInfos);

    tis = new TermInfosReader(cfsDir, segment, fieldInfos);

    // NOTE: the bitvector is stored using the regular directory, not cfs
    if (hasDeletions(si))
      deletedDocs = new BitVector(directory(), segment + ".del");

    // make sure that all index files have been read or are kept open
    // so that if an index update removes them we'll still have them
    freqStream = cfsDir.openInput(segment + ".frq");
    proxStream = cfsDir.openInput(segment + ".prx");
    openNorms(cfsDir);

    if (fieldInfos.hasVectors()) { // open term vector files only as needed
      termVectorsReaderOrig = new TermVectorsReader(cfsDir, segment, fieldInfos);
    }
  }
  /** Clears the thread-local before finalization to avoid a leak. */
  protected void finalize() {
    // patch for pre-1.4.2 JVMs, whose ThreadLocals leak
    termVectorsLocal.set(null);
    super.finalize();
  }
  /** Flush pending deletions and modified norms to the directory. */
  protected void doCommit() throws IOException {
    if (deletedDocsDirty) {               // re-write deleted
      deletedDocs.write(directory(), segment + ".tmp");
      directory().renameFile(segment + ".tmp", segment + ".del");
    }
    // After undeleteAll(), remove the now-stale deletions file.
    if(undeleteAll && directory().fileExists(segment + ".del")){
      directory().deleteFile(segment + ".del");
    }
    if (normsDirty) {               // re-write norms
      Enumeration values = norms.elements();
      while (values.hasMoreElements()) {
        Norm norm = (Norm) values.nextElement();
        if (norm.dirty) {
          norm.reWrite();
        }
      }
    }
    deletedDocsDirty = false;
    normsDirty = false;
    undeleteAll = false;
  }
  /** Close every reader/stream opened in initialize(). */
  protected void doClose() throws IOException {
    fieldsReader.close();
    tis.close();

    if (freqStream != null)
      freqStream.close();
    if (proxStream != null)
      proxStream.close();

    closeNorms();

    if (termVectorsReaderOrig != null)
      termVectorsReaderOrig.close();

    if (cfsReader != null)
      cfsReader.close();
  }
  /** A segment has deletions when its .del file exists. */
  static boolean hasDeletions(SegmentInfo si) throws IOException {
    return si.dir.fileExists(si.name + ".del");
  }

  public boolean hasDeletions() {
    // The bitmap is only allocated once a document has been deleted.
    return deletedDocs != null;
  }

  /** A segment uses the compound format when its .cfs file exists. */
  static boolean usesCompoundFile(SegmentInfo si) throws IOException {
    return si.dir.fileExists(si.name + ".cfs");
  }
static boolean hasSeparateNorms(SegmentInfo si) throws IOException {
String[] result = si.dir.list();
String pattern = si.name + ".s";
int patternLength = pattern.length();
for(int i = 0; i < result.length; i++){
if(result[i].startsWith(pattern) && Character.isDigit(result[i].charAt(patternLength)))
return true;
}
return false;
}
  /** Mark one document deleted in the in-memory bitmap (flushed on commit). */
  protected void doDelete(int docNum) {
    if (deletedDocs == null)
      deletedDocs = new BitVector(maxDoc());
    deletedDocsDirty = true;
    undeleteAll = false;
    deletedDocs.set(docNum);
  }

  /** Discard all deletions; the .del file is removed on the next commit. */
  protected void doUndeleteAll() {
    deletedDocs = null;
    deletedDocsDirty = false;
    undeleteAll = true;
  }
  /** List all files belonging to this segment that currently exist. */
  Vector files() throws IOException {
    Vector files = new Vector(16);

    for (int i = 0; i < IndexFileNames.INDEX_EXTENSIONS.length; i++) {
      String name = segment + "." + IndexFileNames.INDEX_EXTENSIONS[i];
      if (directory().fileExists(name))
        files.addElement(name);
    }

    // Norm files: ".f<N>" for multi-file segments, ".s<N>" (separate
    // norms) next to compound-file segments.
    for (int i = 0; i < fieldInfos.size(); i++) {
      FieldInfo fi = fieldInfos.fieldInfo(i);
      if (fi.isIndexed && !fi.omitNorms){
        String name;
        if(cfsReader == null)
          name = segment + ".f" + i;
        else
          name = segment + ".s" + i;
        if (directory().fileExists(name))
          files.addElement(name);
      }
    }
    return files;
  }
  public TermEnum terms() {
    return tis.terms();
  }

  public TermEnum terms(Term t) throws IOException {
    return tis.terms(t);
  }

  /** Fetch a stored document; deleted ids are rejected. */
  public synchronized Document document(int n) throws IOException {
    if (isDeleted(n))
      throw new IllegalArgumentException
              ("attempt to access a deleted document");
    return fieldsReader.doc(n);
  }

  public synchronized boolean isDeleted(int n) {
    return (deletedDocs != null && deletedDocs.get(n));
  }

  public TermDocs termDocs() throws IOException {
    return new SegmentTermDocs(this);
  }

  public TermPositions termPositions() throws IOException {
    return new SegmentTermPositions(this);
  }

  /** Number of documents containing term t, or 0 when absent. */
  public int docFreq(Term t) throws IOException {
    TermInfo ti = tis.get(t);
    if (ti != null)
      return ti.docFreq;
    else
      return 0;
  }

  /** Live document count: maxDoc() minus deletions. */
  public int numDocs() {
    int n = maxDoc();
    if (deletedDocs != null)
      n -= deletedDocs.count();
    return n;
  }

  public int maxDoc() {
    return fieldsReader.size();
  }
  /**
   * @see IndexReader#getFieldNames()
   * @deprecated  Replaced by {@link #getFieldNames (IndexReader.FieldOption fldOption)}
   */
  public Collection getFieldNames() {
    // maintain a unique set of field names
    Set fieldSet = new HashSet();
    for (int i = 0; i < fieldInfos.size(); i++) {
      FieldInfo fi = fieldInfos.fieldInfo(i);
      fieldSet.add(fi.name);
    }
    return fieldSet;
  }

  /**
   * @see IndexReader#getFieldNames(boolean)
   * @deprecated  Replaced by {@link #getFieldNames (IndexReader.FieldOption fldOption)}
   */
  public Collection getFieldNames(boolean indexed) {
    // maintain a unique set of field names
    Set fieldSet = new HashSet();
    for (int i = 0; i < fieldInfos.size(); i++) {
      FieldInfo fi = fieldInfos.fieldInfo(i);
      if (fi.isIndexed == indexed)
        fieldSet.add(fi.name);
    }
    return fieldSet;
  }

  /**
   * Returns the names of indexed fields whose term-vector settings match
   * the given spec exactly.
   *
   * @see IndexReader#getIndexedFieldNames(Field.TermVector tvSpec)
   * @deprecated  Replaced by {@link #getFieldNames (IndexReader.FieldOption fldOption)}
   */
  public Collection getIndexedFieldNames (Field.TermVector tvSpec){
    // Translate the enum-like spec into the three stored flags.
    boolean storedTermVector;
    boolean storePositionWithTermVector;
    boolean storeOffsetWithTermVector;

    if(tvSpec == Field.TermVector.NO){
      storedTermVector = false;
      storePositionWithTermVector = false;
      storeOffsetWithTermVector = false;
    }
    else if(tvSpec == Field.TermVector.YES){
      storedTermVector = true;
      storePositionWithTermVector = false;
      storeOffsetWithTermVector = false;
    }
    else if(tvSpec == Field.TermVector.WITH_POSITIONS){
      storedTermVector = true;
      storePositionWithTermVector = true;
      storeOffsetWithTermVector = false;
    }
    else if(tvSpec == Field.TermVector.WITH_OFFSETS){
      storedTermVector = true;
      storePositionWithTermVector = false;
      storeOffsetWithTermVector = true;
    }
    else if(tvSpec == Field.TermVector.WITH_POSITIONS_OFFSETS){
      storedTermVector = true;
      storePositionWithTermVector = true;
      storeOffsetWithTermVector = true;
    }
    else{
      throw new IllegalArgumentException("unknown termVector parameter " + tvSpec);
    }

    // maintain a unique set of field names
    Set fieldSet = new HashSet();
    for (int i = 0; i < fieldInfos.size(); i++) {
      FieldInfo fi = fieldInfos.fieldInfo(i);
      if (fi.isIndexed && fi.storeTermVector == storedTermVector &&
          fi.storePositionWithTermVector == storePositionWithTermVector &&
          fi.storeOffsetWithTermVector == storeOffsetWithTermVector){
        fieldSet.add(fi.name);
      }
    }
    return fieldSet;
  }
  /**
   * Returns field names filtered by the requested option: one branch per
   * {@link IndexReader.FieldOption}, matching on the indexed/term-vector
   * flags stored in the field info.
   *
   * @see IndexReader#getFieldNames(IndexReader.FieldOption fldOption)
   */
  public Collection getFieldNames(IndexReader.FieldOption fieldOption) {
    Set fieldSet = new HashSet();
    for (int i = 0; i < fieldInfos.size(); i++) {
      FieldInfo fi = fieldInfos.fieldInfo(i);
      if (fieldOption == IndexReader.FieldOption.ALL) {
        fieldSet.add(fi.name);
      }
      else if (!fi.isIndexed && fieldOption == IndexReader.FieldOption.UNINDEXED) {
        fieldSet.add(fi.name);
      }
      else if (fi.isIndexed && fieldOption == IndexReader.FieldOption.INDEXED) {
        fieldSet.add(fi.name);
      }
      else if (fi.isIndexed && fi.storeTermVector == false && fieldOption == IndexReader.FieldOption.INDEXED_NO_TERMVECTOR) {
        fieldSet.add(fi.name);
      }
      else if (fi.storeTermVector == true &&
               fi.storePositionWithTermVector == false &&
               fi.storeOffsetWithTermVector == false &&
               fieldOption == IndexReader.FieldOption.TERMVECTOR) {
        fieldSet.add(fi.name);
      }
      else if (fi.isIndexed && fi.storeTermVector && fieldOption == IndexReader.FieldOption.INDEXED_WITH_TERMVECTOR) {
        fieldSet.add(fi.name);
      }
      else if (fi.storePositionWithTermVector && fi.storeOffsetWithTermVector == false && fieldOption == IndexReader.FieldOption.TERMVECTOR_WITH_POSITION) {
        fieldSet.add(fi.name);
      }
      else if (fi.storeOffsetWithTermVector && fi.storePositionWithTermVector == false && fieldOption == IndexReader.FieldOption.TERMVECTOR_WITH_OFFSET) {
        fieldSet.add(fi.name);
      }
      else if ((fi.storeOffsetWithTermVector && fi.storePositionWithTermVector) &&
                fieldOption == IndexReader.FieldOption.TERMVECTOR_WITH_POSITION_OFFSET) {
        fieldSet.add(fi.name);
      }
    }
    return fieldSet;
  }
/** Returns true if norms are stored for {@code field} (i.e. the field was
 *  indexed in this segment without norms being omitted). */
public synchronized boolean hasNorms(String field) {
  return norms.containsKey(field);
}
/** Builds an array of {@code size} norm bytes, all encoding a boost of 1.0f;
 *  used as a stand-in when a field has no stored norms. */
static byte[] createFakeNorms(int size) {
  byte[] fake = new byte[size];
  byte one = DefaultSimilarity.encodeNorm(1.0f);
  Arrays.fill(fake, one);
  return fake;
}
// Lazily-built cache of fake norms (all 1.0f), shared across calls.
private byte[] ones;

/** Returns the cached fake-norms array, creating it on first use.
 *  Sized to {@code maxDoc()}. */
private byte[] fakeNorms() {
  if (ones==null) ones=createFakeNorms(maxDoc());
  return ones;
}
// can return null if norms aren't stored
/** Returns the norms for {@code field}, reading them from disk and caching
 *  them in the {@code Norm} entry on first access; null when the field is
 *  not indexed or norms are not stored. */
protected synchronized byte[] getNorms(String field) throws IOException {
  Norm norm = (Norm) norms.get(field);
  if (norm == null) return null;  // not indexed, or norms not stored
  if (norm.bytes == null) {       // value not yet read
    byte[] bytes = new byte[maxDoc()];
    norms(field, bytes, 0);
    norm.bytes = bytes;           // cache it
  }
  return norm.bytes;
}
// returns fake norms if norms aren't available
public synchronized byte[] norms(String field) throws IOException {
  byte[] result = getNorms(field);
  if (result == null) {
    // No stored norms for this field: fall back to the all-1.0f array.
    result = fakeNorms();
  }
  return result;
}
/** Writes {@code value} into the in-memory norms for {@code doc}/{@code field},
 *  marking both the field's norms and the reader as dirty so the change is
 *  flushed later. Silently ignores fields with no stored norms. */
protected void doSetNorm(int doc, String field, byte value)
        throws IOException {
  Norm norm = (Norm) norms.get(field);
  if (norm == null)                             // not an indexed field
    return;
  norm.dirty = true;                            // mark it dirty
  normsDirty = true;
  norms(field)[doc] = value;                    // set the value
}
/** Read norms into a pre-allocated array.
 *  Copies {@code maxDoc()} norm bytes for {@code field} into {@code bytes}
 *  starting at {@code offset}. Uses fake norms when the field has none,
 *  the in-memory cache when populated, and otherwise reads from disk via a
 *  clone of the norm's input stream (closed in a finally block). */
public synchronized void norms(String field, byte[] bytes, int offset)
        throws IOException {
  Norm norm = (Norm) norms.get(field);
  if (norm == null) {
    // Field has no stored norms: fill with the fake all-1.0f values.
    System.arraycopy(fakeNorms(), 0, bytes, offset, maxDoc());
    return;
  }
  if (norm.bytes != null) {                     // can copy from cache
    System.arraycopy(norm.bytes, 0, bytes, offset, maxDoc());
    return;
  }
  // Clone so concurrent readers don't share stream position.
  IndexInput normStream = (IndexInput) norm.in.clone();
  try {                                         // read from disk
    normStream.seek(0);
    normStream.readBytes(bytes, offset, maxDoc());
  } finally {
    normStream.close();
  }
}
/** Opens a {@code Norm} entry for every indexed field that stores norms.
 *  Separate (updated) norms live as ".s" files in the segment directory and
 *  take precedence over the original ".f" files inside {@code cfsDir}. */
private void openNorms(Directory cfsDir) throws IOException {
  for (int i = 0; i < fieldInfos.size(); i++) {
    FieldInfo fi = fieldInfos.fieldInfo(i);
    if (fi.isIndexed && !fi.omitNorms) {
      // look first if there are separate norms in compound format
      String fileName = segment + ".s" + fi.number;
      Directory d = directory();
      if(!d.fileExists(fileName)){
        fileName = segment + ".f" + fi.number;
        d = cfsDir;
      }
      norms.put(fi.name, new Norm(d.openInput(fileName), fi.number));
    }
  }
}
/** Closes the underlying input stream of every open {@code Norm},
 *  synchronizing on the norms table while iterating. */
private void closeNorms() throws IOException {
  synchronized (norms) {
    for (Enumeration e = norms.elements(); e.hasMoreElements(); ) {
      Norm norm = (Norm) e.nextElement();
      norm.in.close();
    }
  }
}
/**
 * Create a clone from the initial TermVectorsReader and store it in the ThreadLocal.
 * Each thread gets its own clone so reads don't contend on stream position.
 * @return TermVectorsReader
 */
private TermVectorsReader getTermVectorsReader() {
  TermVectorsReader tvReader = (TermVectorsReader)termVectorsLocal.get();
  if (tvReader == null) {
    // First access from this thread: clone the shared reader and cache it.
    tvReader = (TermVectorsReader)termVectorsReaderOrig.clone();
    termVectorsLocal.set(tvReader);
  }
  return tvReader;
}
/** Return a term frequency vector for the specified document and field. The
 * vector returned contains term numbers and frequencies for all terms in
 * the specified field of this document, if the field had storeTermVector
 * flag set. If the flag was not set, the method returns null.
 * @throws IOException
 */
public TermFreqVector getTermFreqVector(int docNumber, String field) throws IOException {
  // Check if this field is invalid or has no stored term vector
  FieldInfo fi = fieldInfos.fieldInfo(field);
  if (fi == null || !fi.storeTermVector || termVectorsReaderOrig == null)
    return null;

  // Per-thread clone of the shared term-vectors reader.
  TermVectorsReader termVectorsReader = getTermVectorsReader();
  if (termVectorsReader == null)
    return null;

  return termVectorsReader.get(docNumber, field);
}
/** Return an array of term frequency vectors for the specified document.
 * The array contains a vector for each vectorized field in the document.
 * Each vector contains term numbers and frequencies for all terms
 * in a given vectorized field.
 * If no such fields existed, the method returns null.
 * @throws IOException
 */
public TermFreqVector[] getTermFreqVectors(int docNumber) throws IOException {
  if (termVectorsReaderOrig == null)
    return null;

  // Per-thread clone of the shared term-vectors reader.
  TermVectorsReader termVectorsReader = getTermVectorsReader();
  if (termVectorsReader == null)
    return null;

  return termVectorsReader.get(docNumber);
}
}
| lpxz/grail-lucene358684 | src/java/org/apache/lucene/index/SegmentReader.java | Java | apache-2.0 | 18,645 |
from abc import ABCMeta, abstractmethod, abstractproperty
from contextlib import contextmanager
from functools import wraps
import gzip
from inspect import getargspec
from itertools import (
combinations,
count,
product,
)
import operator
import os
from os.path import abspath, dirname, join, realpath
import shutil
from sys import _getframe
import tempfile
from logbook import TestHandler
from mock import patch
from nose.tools import nottest
from numpy.testing import assert_allclose, assert_array_equal
import pandas as pd
from six import itervalues, iteritems, with_metaclass
from six.moves import filter, map
from sqlalchemy import create_engine
from testfixtures import TempDirectory
from toolz import concat, curry
from zipline.assets import AssetFinder, AssetDBWriter
from zipline.assets.synthetic import make_simple_equity_info
from zipline.data.data_portal import DataPortal
from zipline.data.loader import get_benchmark_filename, INDEX_MAPPING
from zipline.data.minute_bars import (
BcolzMinuteBarReader,
BcolzMinuteBarWriter,
US_EQUITIES_MINUTES_PER_DAY
)
from zipline.data.us_equity_pricing import (
BcolzDailyBarReader,
BcolzDailyBarWriter,
SQLiteAdjustmentWriter,
)
from zipline.finance.blotter import Blotter
from zipline.finance.trading import TradingEnvironment
from zipline.finance.order import ORDER_STATUS
from zipline.lib.labelarray import LabelArray
from zipline.pipeline.data import USEquityPricing
from zipline.pipeline.engine import SimplePipelineEngine
from zipline.pipeline.factors import CustomFactor
from zipline.pipeline.loaders.testing import make_seeded_random_loader
from zipline.utils import security_list
from zipline.utils.calendars import get_calendar
from zipline.utils.input_validation import expect_dimensions
from zipline.utils.numpy_utils import as_column, isnat
from zipline.utils.pandas_utils import timedelta_to_integral_seconds
from zipline.utils.paths import ensure_directory
from zipline.utils.sentinel import sentinel
import numpy as np
from numpy import float64
# The Unix epoch as a timezone-aware UTC Timestamp; zero point for the
# seconds-conversion helpers below.
EPOCH = pd.Timestamp(0, tz='UTC')
def seconds_to_timestamp(seconds):
    """Convert integral seconds since the Unix epoch to a UTC Timestamp."""
    return pd.Timestamp(seconds, unit='s', tz='UTC')
def to_utc(time_str):
    """Convert a string in US/Eastern time to UTC"""
    eastern = pd.Timestamp(time_str, tz='US/Eastern')
    return eastern.tz_convert('UTC')
def str_to_seconds(s):
    """
    Convert a pandas-intelligible string to (integer) seconds since UTC.

    >>> from pandas import Timestamp
    >>> (Timestamp('2014-01-01') - Timestamp(0)).total_seconds()
    1388534400.0
    >>> str_to_seconds('2014-01-01')
    1388534400
    """
    # Interpret the string as UTC and measure its offset from the epoch.
    delta = pd.Timestamp(s, tz='UTC') - EPOCH
    return timedelta_to_integral_seconds(delta)
def drain_zipline(test, zipline):
    """
    Exhaust a zipline simulation iterator, collecting every emitted update.

    Returns a tuple of (list of updates, total transaction count summed
    across all 'daily_perf' packets).
    """
    updates = []
    transactions = 0
    # start the simulation
    for message in zipline:
        updates.append(message)
        if 'daily_perf' in message:
            daily_txns = message['daily_perf']['transactions']
            transactions += len(daily_txns)
    return updates, transactions
def check_algo_results(test,
                       results,
                       expected_transactions_count=None,
                       expected_order_count=None,
                       expected_positions_count=None,
                       sid=None):
    """
    Assert aggregate transaction and unique-order counts on an algorithm's
    results frame via the given TestCase's assertions.
    """
    if expected_transactions_count is not None:
        all_txns = flatten_list(results["transactions"])
        test.assertEqual(expected_transactions_count, len(all_txns))

    if expected_positions_count is not None:
        raise NotImplementedError

    if expected_order_count is not None:
        # de-dup orders on id, because orders are put back into perf packets
        # whenever they a txn is filled
        unique_order_ids = {
            order['id'] for order in flatten_list(results["orders"])
        }
        test.assertEqual(expected_order_count, len(unique_order_ids))
def flatten_list(nested):
    """
    Flatten one level of nesting: [[a, b], [c]] -> [a, b, c].

    The parameter was previously named ``list``, shadowing the builtin;
    renamed to ``nested`` (callers pass it positionally).
    """
    return [item for sublist in nested for item in sublist]
def assert_single_position(test, zipline):
    """
    Drain the given zipline iterator and assert that the simulation ended
    with every order filled and exactly one open position, in the sid named
    by ``test.zipline_test_config['sid']``.

    Returns the drained (output, transaction_count) pair for further checks.
    """
    output, transaction_count = drain_zipline(test, zipline)

    # The expected count comes from the test config; fall back to the
    # configured order count when no explicit expectation is given.
    if 'expected_transactions' in test.zipline_test_config:
        test.assertEqual(
            test.zipline_test_config['expected_transactions'],
            transaction_count
        )
    else:
        test.assertEqual(
            test.zipline_test_config['order_count'],
            transaction_count
        )

    # the final message is the risk report, the second to
    # last is the final day's results. Positions is a list of
    # dicts.
    closing_positions = output[-2]['daily_perf']['positions']

    # confirm that all orders were filled.
    # iterate over the output updates, overwriting
    # orders when they are updated. Then check the status on all.
    orders_by_id = {}
    for update in output:
        if 'daily_perf' in update:
            if 'orders' in update['daily_perf']:
                for order in update['daily_perf']['orders']:
                    orders_by_id[order['id']] = order

    for order in itervalues(orders_by_id):
        test.assertEqual(
            order['status'],
            ORDER_STATUS.FILLED,
            "")

    test.assertEqual(
        len(closing_positions),
        1,
        "Portfolio should have one position."
    )

    sid = test.zipline_test_config['sid']
    test.assertEqual(
        closing_positions[0]['sid'],
        sid,
        "Portfolio should have one position in " + str(sid)
    )
    return output, transaction_count
@contextmanager
def security_list_copy():
    """
    Context manager that copies the on-disk security lists into a temp
    directory and patches ``security_list.SECURITY_LISTS_DIR`` to point at
    the copy, so tests can mutate the lists without touching the originals.

    Also patches a ``using_copy`` marker attribute (checked by
    ``add_security_data``). The temp directory is removed on exit.
    """
    old_dir = security_list.SECURITY_LISTS_DIR
    new_dir = tempfile.mkdtemp()
    try:
        for subdir in os.listdir(old_dir):
            shutil.copytree(os.path.join(old_dir, subdir),
                            os.path.join(new_dir, subdir))
            with patch.object(security_list, 'SECURITY_LISTS_DIR', new_dir), \
                    patch.object(security_list, 'using_copy', True,
                                 create=True):
                yield
    finally:
        # Best-effort cleanup (ignore_errors=True).
        shutil.rmtree(new_dir, True)
def add_security_data(adds, deletes):
    """
    Write ``add`` and ``delete`` symbol files (one symbol per line) into the
    copied leveraged-ETF security list directory. Must run inside the
    ``security_list_copy`` context so the real lists are never modified.
    """
    if not hasattr(security_list, 'using_copy'):
        raise Exception('add_security_data must be used within '
                        'security_list_copy context')
    directory = os.path.join(
        security_list.SECURITY_LISTS_DIR,
        "leveraged_etf_list/20150127/20150125"
    )
    if not os.path.exists(directory):
        os.makedirs(directory)

    def _write_symbols(filename, symbols):
        # One symbol per line, newline-terminated.
        with open(os.path.join(directory, filename), 'w') as f:
            for sym in symbols:
                f.write(sym)
                f.write('\n')

    _write_symbols("delete", deletes)
    _write_symbols("add", adds)
def all_pairs_matching_predicate(values, pred):
    """
    Return an iterator of all pairs, (v0, v1) from values such that
    `pred(v0, v1) == True`

    Parameters
    ----------
    values : iterable
    pred : function

    Returns
    -------
    pairs_iterator : generator
        Generator yielding pairs matching `pred`.

    Examples
    --------
    >>> from zipline.testing import all_pairs_matching_predicate
    >>> from operator import eq, lt
    >>> list(all_pairs_matching_predicate(range(5), eq))
    [(0, 0), (1, 1), (2, 2), (3, 3), (4, 4)]
    >>> list(all_pairs_matching_predicate("abcd", lt))
    [('a', 'b'), ('a', 'c'), ('a', 'd'), ('b', 'c'), ('b', 'd'), ('c', 'd')]
    """
    all_pairs = product(values, repeat=2)
    return (pair for pair in all_pairs if pred(*pair))
def product_upper_triangle(values, include_diagonal=False):
    """
    Return an iterator over pairs, (v0, v1), drawn from values.

    If `include_diagonal` is True, returns all pairs such that v0 <= v1.
    If `include_diagonal` is False, returns all pairs such that v0 < v1.
    """
    if include_diagonal:
        comparison = operator.le
    else:
        comparison = operator.lt
    return all_pairs_matching_predicate(values, comparison)
def all_subindices(index):
    """
    Return all valid sub-indices of a pandas Index.
    """
    # Every (start, stop) with start < stop, plus the empty slice pairs,
    # yields a distinct contiguous sub-index.
    bounds = range(len(index) + 1)
    for start, stop in product_upper_triangle(bounds):
        yield index[start:stop]
def chrange(start, stop):
    """
    Construct an iterable of length-1 strings beginning with `start` and ending
    with `stop`.

    Parameters
    ----------
    start : str
        The first character.
    stop : str
        The last character.

    Returns
    -------
    chars: iterable[str]
        Iterable of strings beginning with start and ending with stop.

    Examples
    --------
    >>> chrange('A', 'C')
    ['A', 'B', 'C']
    """
    first = ord(start)
    last = ord(stop)
    return [chr(code) for code in range(first, last + 1)]
def make_trade_data_for_asset_info(dates,
                                   asset_info,
                                   price_start,
                                   price_step_by_date,
                                   price_step_by_sid,
                                   volume_start,
                                   volume_step_by_date,
                                   volume_step_by_sid,
                                   frequency,
                                   writer=None):
    """
    Convert the asset info dataframe into a dataframe of trade data for each
    sid, and write to the writer if provided. Write NaNs for locations where
    assets did not exist. Return a dict of the dataframes, keyed by sid.
    """
    trade_data = {}
    sids = asset_info.index

    # Prices form an arithmetic grid: base + per-sid step + per-date step.
    price_sid_deltas = np.arange(len(sids), dtype=float64) * price_step_by_sid
    price_date_deltas = (np.arange(len(dates), dtype=float64) *
                         price_step_by_date)
    prices = (price_sid_deltas + as_column(price_date_deltas)) + price_start

    # Volumes follow the same grid construction (integer-valued).
    volume_sid_deltas = np.arange(len(sids)) * volume_step_by_sid
    volume_date_deltas = np.arange(len(dates)) * volume_step_by_date
    volumes = volume_sid_deltas + as_column(volume_date_deltas) + volume_start

    for j, sid in enumerate(sids):
        start_date, end_date = asset_info.loc[sid, ['start_date', 'end_date']]
        # Normalize here so the we still generate non-NaN values on the minutes
        # for an asset's last trading day.
        for i, date in enumerate(dates.normalize()):
            if not (start_date <= date <= end_date):
                # Outside the asset's lifetime: zero out that row.
                # NOTE(review): the docstring says NaNs are written here, but
                # the code writes 0 — confirm which is intended.
                prices[i, j] = 0
                volumes[i, j] = 0

        # Flat OHLC: open/high/low/close all share the same price column.
        df = pd.DataFrame(
            {
                "open": prices[:, j],
                "high": prices[:, j],
                "low": prices[:, j],
                "close": prices[:, j],
                "volume": volumes[:, j],
            },
            index=dates,
        )

        if writer:
            writer.write_sid(sid, df)

        trade_data[sid] = df

    return trade_data
def check_allclose(actual,
                   desired,
                   rtol=1e-07,
                   atol=0,
                   err_msg='',
                   verbose=True):
    """
    Wrapper around np.testing.assert_allclose that also verifies that inputs
    are ndarrays.

    See Also
    --------
    np.assert_allclose
    """
    actual_type = type(actual)
    desired_type = type(desired)
    if actual_type != desired_type:
        raise AssertionError("%s != %s" % (actual_type, desired_type))
    return assert_allclose(
        actual,
        desired,
        rtol=rtol,
        atol=atol,
        err_msg=err_msg,
        verbose=verbose,
    )
def check_arrays(x, y, err_msg='', verbose=True, check_dtypes=True):
    """
    Wrapper around np.testing.assert_array_equal that also verifies that inputs
    are ndarrays.

    Handles two special cases: LabelArrays are compared via their missing-value
    masks plus their string forms, and datetime/timedelta arrays ('m'/'M'
    dtype kinds) are compared with NaT treated as equal to NaT.

    See Also
    --------
    np.assert_array_equal
    """
    assert type(x) == type(y), "{x} != {y}".format(x=type(x), y=type(y))
    assert x.dtype == y.dtype, "{x.dtype} != {y.dtype}".format(x=x, y=y)
    if isinstance(x, LabelArray):
        # Check that both arrays have missing values in the same locations...
        assert_array_equal(
            x.is_missing(),
            y.is_missing(),
            err_msg=err_msg,
            verbose=verbose,
        )
        # ...then check the actual values as well.
        x = x.as_string_array()
        y = y.as_string_array()
    elif x.dtype.kind in 'mM':
        # datetime64 / timedelta64: NaT != NaT under ==, so compare the NaT
        # masks explicitly first.
        x_isnat = isnat(x)
        y_isnat = isnat(y)
        assert_array_equal(
            x_isnat,
            y_isnat,
            err_msg="NaTs not equal",
            verbose=verbose,
        )
        # Fill NaTs with zero for comparison.
        x = np.where(x_isnat, np.zeros_like(x), x)
        y = np.where(y_isnat, np.zeros_like(y), y)
    return assert_array_equal(x, y, err_msg=err_msg, verbose=verbose)
class UnexpectedAttributeAccess(Exception):
    """Raised when an attribute is accessed on an object that forbids it."""
    pass
class ExplodingObject(object):
    """
    Object that will raise an exception on any attribute access.

    Useful for verifying that an object is never touched during a
    function/method call.
    """
    def __getattribute__(self, name):
        # Intercept every instance attribute lookup and fail loudly with the
        # name that was accessed.
        raise UnexpectedAttributeAccess(name)
def write_minute_data(trading_calendar, tempdir, minutes, sids):
    """
    Write synthetic minute bars for ``sids`` covering ``minutes`` into a
    bcolz store rooted at ``tempdir.path`` and return that path.
    """
    first_session = trading_calendar.minute_to_session_label(
        minutes[0], direction="none"
    )
    last_session = trading_calendar.minute_to_session_label(
        minutes[-1], direction="none"
    )

    # All sessions spanned by the requested minutes.
    sessions = trading_calendar.sessions_in_range(first_session, last_session)

    write_bcolz_minute_data(
        trading_calendar,
        sessions,
        tempdir.path,
        create_minute_bar_data(minutes, sids),
    )

    return tempdir.path
def create_minute_bar_data(minutes, sids):
    """
    Yield (sid, DataFrame) pairs of synthetic OHLCV minute bars indexed by
    ``minutes``. Prices ramp linearly per minute, offset by the sid's
    position in ``sids``; volume is a constant per sid.
    """
    bar_count = len(minutes)
    ramp = np.arange(bar_count)
    for offset, sid in enumerate(sids):
        frame = pd.DataFrame(
            {
                'open': ramp + 10 + offset,
                'high': ramp + 15 + offset,
                'low': ramp + 8 + offset,
                'close': ramp + 10 + offset,
                'volume': 100 + offset,
            },
            index=minutes,
        )
        yield sid, frame
def create_daily_bar_data(sessions, sids):
length = len(sessions)
for sid_idx, sid in enumerate(sids):
yield sid, pd.DataFrame(
{
"open": (np.array(range(10, 10 + length)) + sid_idx),
"high": (np.array(range(15, 15 + length)) + sid_idx),
"low": (np.array(range(8, 8 + length)) + sid_idx),
"close": (np.array(range(10, 10 + length)) + sid_idx),
"volume": np.array(range(100, 100 + length)) + sid_idx,
"day": [session.value for session in sessions]
},
index=sessions,
)
def write_daily_data(tempdir, sim_params, sids, trading_calendar):
    """
    Write synthetic daily bars for ``sids`` over the sim_params session range
    into a bcolz store under ``tempdir`` and return the store's path.
    """
    path = os.path.join(tempdir.path, "testdaily.bcolz")
    writer = BcolzDailyBarWriter(
        path,
        trading_calendar,
        sim_params.start_session,
        sim_params.end_session,
    )
    writer.write(create_daily_bar_data(sim_params.sessions, sids))
    return path
def create_data_portal(asset_finder, tempdir, sim_params, sids,
                       trading_calendar, adjustment_reader=None):
    """
    Build a DataPortal backed by synthetic bcolz data written under
    ``tempdir``: daily bars when ``sim_params.data_frequency`` is "daily",
    minute bars otherwise.
    """
    if sim_params.data_frequency == "daily":
        daily_path = write_daily_data(tempdir, sim_params, sids,
                                      trading_calendar)

        equity_daily_reader = BcolzDailyBarReader(daily_path)

        return DataPortal(
            asset_finder, trading_calendar,
            first_trading_day=equity_daily_reader.first_trading_day,
            equity_daily_reader=equity_daily_reader,
            adjustment_reader=adjustment_reader
        )
    else:
        # Minute frequency: cover every trading minute in the sim range.
        minutes = trading_calendar.minutes_in_range(
            sim_params.first_open,
            sim_params.last_close
        )

        minute_path = write_minute_data(trading_calendar, tempdir, minutes,
                                        sids)

        equity_minute_reader = BcolzMinuteBarReader(minute_path)

        return DataPortal(
            asset_finder, trading_calendar,
            first_trading_day=equity_minute_reader.first_trading_day,
            equity_minute_reader=equity_minute_reader,
            adjustment_reader=adjustment_reader
        )
def write_bcolz_minute_data(trading_calendar, days, path, data):
    """
    Write the (sid, DataFrame) pairs in ``data`` to a bcolz minute store at
    ``path`` spanning the first through last of ``days``.
    """
    writer = BcolzMinuteBarWriter(
        path,
        trading_calendar,
        days[0],
        days[-1],
        US_EQUITIES_MINUTES_PER_DAY,
    )
    writer.write(data)
def create_minute_df_for_asset(trading_calendar,
                               start_dt,
                               end_dt,
                               interval=1,
                               start_val=1,
                               minute_blacklist=None):
    """
    Build a synthetic minute OHLCV frame over the sessions between
    ``start_dt`` and ``end_dt``. ``close`` ramps from ``start_val``;
    open/high/low are fixed offsets from it and volume is 100x.

    When ``interval`` > 1, each run of ``interval - 1`` consecutive rows is
    zeroed so only every ``interval``-th minute keeps its values. Minutes in
    ``minute_blacklist`` are zeroed as well.
    """
    asset_minutes = trading_calendar.minutes_for_sessions_in_range(
        start_dt, end_dt
    )
    minutes_count = len(asset_minutes)
    minutes_arr = np.array(range(start_val, start_val + minutes_count))

    df = pd.DataFrame(
        {
            "open": minutes_arr + 1,
            "high": minutes_arr + 2,
            "low": minutes_arr - 1,
            "close": minutes_arr,
            "volume": 100 * minutes_arr,
        },
        index=asset_minutes,
    )

    if interval > 1:
        counter = 0
        while counter < len(minutes_arr):
            # Row-slice assignment: zeroes rows [counter, counter+interval-1),
            # leaving the last row of each interval intact.
            df[counter:(counter + interval - 1)] = 0
            counter += interval

    if minute_blacklist is not None:
        for minute in minute_blacklist:
            df.loc[minute] = 0

    return df
def create_daily_df_for_asset(trading_calendar, start_day, end_day,
                              interval=1):
    """
    Build a synthetic daily OHLCV frame for the sessions between
    ``start_day`` and ``end_day``. ``close`` ramps from 2; open/high/low are
    fixed offsets and volume is 100x.

    When ``interval`` > 1, only every ``interval``-th row keeps its values;
    all other rows are zeroed.
    """
    days = trading_calendar.sessions_in_range(start_day, end_day)
    days_count = len(days)
    days_arr = np.arange(days_count) + 2

    df = pd.DataFrame(
        {
            "open": days_arr + 1,
            "high": days_arr + 2,
            "low": days_arr - 1,
            "close": days_arr,
            "volume": days_arr * 100,
        },
        index=days,
    )

    if interval > 1:
        # only keep every 'interval' rows.  A single positional row
        # assignment zeroes all five columns at once, avoiding the pandas
        # chained-assignment (SettingWithCopy) pattern the previous
        # per-column df["col"].iloc[idx] writes relied on.
        for idx in range(days_count):
            if (idx + 1) % interval != 0:
                df.iloc[idx] = 0

    return df
def trades_by_sid_to_dfs(trades_by_sid, index):
    """
    Convert a mapping of sid -> iterable of trade events into (sid,
    OHLCV DataFrame) pairs, one row per trade, indexed by ``index``.
    """
    for sidint, trades in iteritems(trades_by_sid):
        opens = []
        highs = []
        lows = []
        closes = []
        volumes = []
        for trade in trades:
            opens.append(trade.open_price)
            highs.append(trade.high)
            lows.append(trade.low)
            closes.append(trade.close_price)
            volumes.append(trade.volume)
        frame = pd.DataFrame(
            {
                "open": opens,
                "high": highs,
                "low": lows,
                "close": closes,
                "volume": volumes,
            },
            index=index,
        )
        yield sidint, frame
def create_data_portal_from_trade_history(asset_finder, trading_calendar,
                                          tempdir, sim_params, trades_by_sid):
    """
    Build a DataPortal whose bcolz store is populated from the given
    per-sid trade histories: daily bars when ``sim_params.data_frequency``
    is "daily", minute bars otherwise.
    """
    if sim_params.data_frequency == "daily":
        path = os.path.join(tempdir.path, "testdaily.bcolz")
        writer = BcolzDailyBarWriter(
            path, trading_calendar,
            sim_params.start_session,
            sim_params.end_session
        )
        writer.write(
            trades_by_sid_to_dfs(trades_by_sid, sim_params.sessions),
        )

        equity_daily_reader = BcolzDailyBarReader(path)

        return DataPortal(
            asset_finder, trading_calendar,
            first_trading_day=equity_daily_reader.first_trading_day,
            equity_daily_reader=equity_daily_reader,
        )
    else:
        minutes = trading_calendar.minutes_in_range(
            sim_params.first_open,
            sim_params.last_close
        )

        length = len(minutes)
        assets = {}

        for sidint, trades in iteritems(trades_by_sid):
            opens = np.zeros(length)
            highs = np.zeros(length)
            lows = np.zeros(length)
            closes = np.zeros(length)
            volumes = np.zeros(length)

            for trade in trades:
                # put them in the right place
                idx = minutes.searchsorted(trade.dt)

                # Prices are scaled by 1000 here; presumably to match the
                # minute writer's integer storage format — TODO confirm.
                opens[idx] = trade.open_price * 1000
                highs[idx] = trade.high * 1000
                lows[idx] = trade.low * 1000
                closes[idx] = trade.close_price * 1000
                volumes[idx] = trade.volume

            assets[sidint] = pd.DataFrame({
                "open": opens,
                "high": highs,
                "low": lows,
                "close": closes,
                "volume": volumes,
                "dt": minutes
            }).set_index("dt")

        write_bcolz_minute_data(
            trading_calendar,
            sim_params.sessions,
            tempdir.path,
            assets
        )

        equity_minute_reader = BcolzMinuteBarReader(tempdir.path)

        return DataPortal(
            asset_finder, trading_calendar,
            first_trading_day=equity_minute_reader.first_trading_day,
            equity_minute_reader=equity_minute_reader,
        )
class FakeDataPortal(DataPortal):
    """
    DataPortal stub returning constant data: spot values are 1.0 (100 for
    volume) and daily history windows are filled with 100.0.
    """

    def __init__(self, env, trading_calendar=None,
                 first_trading_day=None):
        if trading_calendar is None:
            trading_calendar = get_calendar("NYSE")

        super(FakeDataPortal, self).__init__(env.asset_finder,
                                             trading_calendar,
                                             first_trading_day)

    def get_spot_value(self, asset, field, dt, data_frequency):
        # Constant values regardless of asset/dt.
        if field == "volume":
            return 100
        else:
            return 1.0

    def get_history_window(self, assets, end_dt, bar_count, frequency, field,
                           data_frequency, ffill=True):
        # Only the daily path is implemented; other frequencies implicitly
        # return None.
        if frequency == "1d":
            end_idx = \
                self.trading_calendar.all_sessions.searchsorted(end_dt)
            days = self.trading_calendar.all_sessions[
                (end_idx - bar_count + 1):(end_idx + 1)
            ]

            df = pd.DataFrame(
                np.full((bar_count, len(assets)), 100.0),
                index=days,
                columns=assets
            )

            return df
class FetcherDataPortal(DataPortal):
    """
    Mock dataportal that returns fake data for history and non-fetcher
    spot value.
    """
    def __init__(self, asset_finder, trading_calendar, first_trading_day=None):
        super(FetcherDataPortal, self).__init__(asset_finder, trading_calendar,
                                                first_trading_day)

    def get_spot_value(self, asset, field, dt, data_frequency):
        # if this is a fetcher field, exercise the regular code path
        if self._is_extra_source(asset, field, self._augmented_sources_map):
            return super(FetcherDataPortal, self).get_spot_value(
                asset, field, dt, data_frequency)

        # otherwise just return a fixed value
        return int(asset)

    # XXX: These aren't actually the methods that are used by the superclasses,
    # so these don't do anything, and this class will likely produce unexpected
    # results for history().
    def _get_daily_window_for_sid(self, asset, field, days_in_window,
                                  extra_slot=True):
        # Synthetic ramp 0..days_in_window-1.
        return np.arange(days_in_window, dtype=np.float64)

    def _get_minute_window_for_asset(self, asset, field, minutes_for_window):
        # Synthetic ramp 0..minutes_for_window-1.
        return np.arange(minutes_for_window, dtype=np.float64)
class tmp_assets_db(object):
    """Create a temporary assets sqlite database.
    This is meant to be used as a context manager.

    Parameters
    ----------
    url : string
        The URL for the database connection.
    **frames
        The frames to pass to the AssetDBWriter.
        By default this maps equities:
        ('A', 'B', 'C') -> map(ord, 'ABC')

    See Also
    --------
    empty_assets_db
    tmp_asset_finder
    """
    # Sentinel distinguishing "no equities passed" from an explicit None.
    _default_equities = sentinel('_default_equities')

    def __init__(self,
                 url='sqlite:///:memory:',
                 equities=_default_equities,
                 **frames):
        self._url = url
        # Engine handle: created in __enter__, released in __exit__.
        # (A redundant second ``self._eng = None`` assignment was removed.)
        self._eng = None
        if equities is self._default_equities:
            equities = make_simple_equity_info(
                list(map(ord, 'ABC')),
                pd.Timestamp(0),
                pd.Timestamp('2015'),
            )
        frames['equities'] = equities
        self._frames = frames

    def __enter__(self):
        self._eng = eng = create_engine(self._url)
        AssetDBWriter(eng).write(**self._frames)
        return eng

    def __exit__(self, *excinfo):
        assert self._eng is not None, '_eng was not set in __enter__'
        self._eng.dispose()
        self._eng = None
def empty_assets_db():
    """Context manager for creating an empty assets db.

    See Also
    --------
    tmp_assets_db
    """
    # Passing equities=None suppresses the default synthetic equities.
    return tmp_assets_db(equities=None)
class tmp_asset_finder(tmp_assets_db):
    """Create a temporary asset finder using an in memory sqlite db.

    Parameters
    ----------
    url : string
        The URL for the database connection.
    finder_cls : type, optional
        The type of asset finder to create from the assets db.
    **frames
        Forwarded to ``tmp_assets_db``.

    See Also
    --------
    tmp_assets_db
    """
    def __init__(self,
                 url='sqlite:///:memory:',
                 finder_cls=AssetFinder,
                 **frames):
        self._finder_cls = finder_cls
        super(tmp_asset_finder, self).__init__(url=url, **frames)

    def __enter__(self):
        # Wrap the engine returned by tmp_assets_db.__enter__ in a finder.
        return self._finder_cls(super(tmp_asset_finder, self).__enter__())
def empty_asset_finder():
    """Context manager for creating an empty asset finder.

    See Also
    --------
    empty_assets_db
    tmp_assets_db
    tmp_asset_finder
    """
    return tmp_asset_finder(equities=None)
class tmp_trading_env(tmp_asset_finder):
    """Create a temporary trading environment.

    Parameters
    ----------
    load : callable, optional
        Function that returns benchmark returns and treasury curves.
    finder_cls : type, optional
        The type of asset finder to create from the assets db.
    **frames
        Forwarded to ``tmp_assets_db``.

    See Also
    --------
    empty_trading_env
    tmp_asset_finder
    """
    def __init__(self, load=None, *args, **kwargs):
        super(tmp_trading_env, self).__init__(*args, **kwargs)
        self._load = load

    def __enter__(self):
        # The finder from tmp_asset_finder.__enter__ exposes its engine;
        # hand that to TradingEnvironment as the asset db.
        return TradingEnvironment(
            load=self._load,
            asset_db_path=super(tmp_trading_env, self).__enter__().engine,
        )
def empty_trading_env():
    """Context manager for a TradingEnvironment backed by an empty assets db."""
    return tmp_trading_env(equities=None)
class SubTestFailures(AssertionError):
    """
    Aggregate failure raised by ``subtest`` after all parameterized runs
    finish; carries (scope-dict, exception) pairs for every failing run.
    """
    def __init__(self, *failures):
        self.failures = failures

    def __str__(self):
        # The generator expression must be parenthesized: a bare genexp
        # followed by a trailing comma inside a call argument list is a
        # SyntaxError on Python 3.7+.
        return 'failures:\n %s' % '\n '.join(
            '\n '.join((
                ', '.join('%s=%r' % item for item in scope.items()),
                '%s: %s' % (type(exc).__name__, exc),
            ))
            for scope, exc in self.failures
        )
@nottest
def subtest(iterator, *_names):
    """
    Construct a subtest in a unittest.

    Consider using ``zipline.testing.parameter_space`` when subtests
    are constructed over a single input or over the cross-product of multiple
    inputs.

    ``subtest`` works by decorating a function as a subtest. The decorated
    function will be run by iterating over the ``iterator`` and *unpacking the
    values into the function. If any of the runs fail, the result will be put
    into a set and the rest of the tests will be run. Finally, if any failed,
    all of the results will be dumped as one failure.

    Parameters
    ----------
    iterator : iterable[iterable]
        The iterator of arguments to pass to the function.
    *name : iterator[str]
        The names to use for each element of ``iterator``. These will be used
        to print the scope when a test fails. If not provided, it will use the
        integer index of the value as the name.

    Examples
    --------
    ::

       class MyTest(TestCase):
           def test_thing(self):
               # Example usage inside another test.
               @subtest(([n] for n in range(100000)), 'n')
               def subtest(n):
                   self.assertEqual(n % 2, 0, 'n was not even')
               subtest()

           @subtest(([n] for n in range(100000)), 'n')
           def test_decorated_function(self, n):
               # Example usage to parameterize an entire function.
               self.assertEqual(n % 2, 1, 'n was not odd')

    Notes
    -----
    We use this when we:

    * Will never want to run each parameter individually.
    * Have a large parameter space we are testing
      (see tests/utils/test_events.py).

    ``nose_parameterized.expand`` will create a test for each parameter
    combination which bloats the test output and makes the travis pages slow.

    We cannot use ``unittest2.TestCase.subTest`` because nose, pytest, and
    nose2 do not support ``addSubTest``.

    See Also
    --------
    zipline.testing.parameter_space
    """
    def dec(f):
        @wraps(f)
        def wrapped(*args, **kwargs):
            names = _names
            failures = []
            for scope in iterator:
                scope = tuple(scope)
                try:
                    # Positional args are the fixed outer args followed by
                    # this scope's parameter values.
                    f(*args + scope, **kwargs)
                except Exception as e:
                    # No explicit names: fall back to integer indices.  The
                    # count() iterator is created once and shared, so zip()
                    # consumes indices across successive failures.
                    if not names:
                        names = count()
                    failures.append((dict(zip(names, scope)), e))
            if failures:
                raise SubTestFailures(*failures)

        return wrapped
    return dec
class MockDailyBarReader(object):
    """Minimal daily bar reader stand-in: every lookup returns 100."""

    def get_value(self, col, sid, dt):
        # Constant price regardless of column, sid, or date.
        return 100
def create_mock_adjustment_data(splits=None, dividends=None, mergers=None):
    """
    Normalize the given splits/dividends/mergers into DataFrames, supplying
    empty canonical frames for any that are None. Returns the tuple
    (splits, mergers, dividends).
    """
    def _as_frame(value, make_empty):
        # None -> canonical empty frame; non-DataFrame -> coerced DataFrame.
        if value is None:
            return make_empty()
        if isinstance(value, pd.DataFrame):
            return value
        return pd.DataFrame(value)

    splits = _as_frame(splits, create_empty_splits_mergers_frame)
    mergers = _as_frame(mergers, create_empty_splits_mergers_frame)
    dividends = _as_frame(dividends, create_empty_dividends_frame)
    return splits, mergers, dividends
def create_mock_adjustments(tempdir, days, splits=None, dividends=None,
                            mergers=None):
    """
    Write the given (possibly empty) adjustments into a SQLite adjustments
    db under ``tempdir`` and return its path.
    """
    path = tempdir.getpath("test_adjustments.db")
    SQLiteAdjustmentWriter(path, MockDailyBarReader(), days).write(
        *create_mock_adjustment_data(splits, dividends, mergers)
    )

    return path
def assert_timestamp_equal(left, right, compare_nat_equal=True, msg=""):
    """
    Assert that two pandas Timestamp objects are the same.

    Parameters
    ----------
    left, right : pd.Timestamp
        The values to compare.
    compare_nat_equal : bool, optional
        Whether to consider `NaT` values equal. Defaults to True.
    msg : str, optional
        A message to forward to `pd.util.testing.assert_equal`.
    """
    both_nat = left is pd.NaT and right is pd.NaT
    if compare_nat_equal and both_nat:
        return
    return pd.util.testing.assert_equal(left, right, msg=msg)
def powerset(values):
    """
    Return the power set (i.e., the set of all subsets) of entries in `values`.
    """
    # Chain together the k-combinations for every subset size 0..len(values).
    subset_sizes = range(len(values) + 1)
    return concat(combinations(values, size) for size in subset_sizes)
def to_series(knowledge_dates, earning_dates):
    """
    Helper for converting a dict of strings to a Series of datetimes.

    This is just for making the test cases more readable.
    """
    index = pd.to_datetime(knowledge_dates)
    values = pd.to_datetime(earning_dates)
    return pd.Series(data=values, index=index)
def gen_calendars(start, stop, critical_dates):
    """
    Generate calendars to use as inputs.

    Yields one-element tuples: the full daily date range with every subset
    of ``critical_dates`` dropped, followed by the NYSE trading calendar
    restricted to [start, stop].
    """
    all_dates = pd.date_range(start, stop, tz='utc')
    for to_drop in map(list, powerset(critical_dates)):
        # Have to yield tuples.
        yield (all_dates.drop(to_drop),)

    # Also test with the trading calendar.
    trading_days = get_calendar("NYSE").all_days
    yield (trading_days[trading_days.slice_indexer(start, stop)],)
@contextmanager
def temp_pipeline_engine(calendar, sids, random_seed, symbols=None):
    """
    A contextManager that yields a SimplePipelineEngine holding a reference to
    an AssetFinder generated via tmp_asset_finder.

    Parameters
    ----------
    calendar : pd.DatetimeIndex
        Calendar to pass to the constructed PipelineEngine.
    sids : iterable[int]
        Sids to use for the temp asset finder.
    random_seed : int
        Integer used to seed instances of SeededRandomLoader.
    symbols : iterable[str], optional
        Symbols for constructed assets. Forwarded to make_simple_equity_info.
    """
    equity_info = make_simple_equity_info(
        sids=sids,
        start_date=calendar[0],
        end_date=calendar[-1],
        symbols=symbols,
    )

    loader = make_seeded_random_loader(random_seed, calendar, sids)

    # All pipeline columns resolve to the same seeded random loader.
    def get_loader(column):
        return loader

    with tmp_asset_finder(equities=equity_info) as finder:
        yield SimplePipelineEngine(get_loader, calendar, finder)
def parameter_space(__fail_fast=False, **params):
    """
    Wrapper around subtest that allows passing keywords mapping names to
    iterables of values.

    The decorated test function will be called with the cross-product of all
    possible inputs

    Examples
    --------
    >>> from unittest import TestCase
    >>> class SomeTestCase(TestCase):
    ...     @parameter_space(x=[1, 2], y=[2, 3])
    ...     def test_some_func(self, x, y):
    ...         # Will be called with every possible combination of x and y.
    ...         self.assertEqual(somefunc(x, y), expected_result(x, y))

    See Also
    --------
    zipline.testing.subtest
    """
    def decorator(f):
        # ``inspect.getargspec`` was removed in Python 3.11; prefer
        # ``getfullargspec`` (which exposes ``varkw`` instead of
        # ``keywords``) and fall back for Python 2.
        try:
            from inspect import getfullargspec as _getargspec
        except ImportError:
            from inspect import getargspec as _getargspec

        argspec = _getargspec(f)
        if argspec.varargs:
            raise AssertionError("parameter_space() doesn't support *args")
        varkw = getattr(argspec, 'varkw', None) or \
            getattr(argspec, 'keywords', None)
        if varkw:
            raise AssertionError("parameter_space() doesn't support **kwargs")
        if argspec.defaults:
            raise AssertionError("parameter_space() doesn't support defaults.")

        # Skip over implicit self.
        argnames = argspec.args
        if argnames[0] == 'self':
            argnames = argnames[1:]

        extra = set(params) - set(argnames)
        if extra:
            raise AssertionError(
                "Keywords %s supplied to parameter_space() are "
                "not in function signature." % extra
            )

        unspecified = set(argnames) - set(params)
        if unspecified:
            # BUG FIX: this message previously interpolated ``extra`` (always
            # empty on this branch) instead of ``unspecified``.
            raise AssertionError(
                "Function arguments %s were not "
                "supplied to parameter_space()." % unspecified
            )

        def make_param_sets():
            # Cross-product of the value iterables, in signature order.
            return product(*(params[name] for name in argnames))

        if __fail_fast:
            @wraps(f)
            def wrapped(self):
                for args in make_param_sets():
                    f(self, *args)
            return wrapped
        else:
            @wraps(f)
            def wrapped(*args, **kwargs):
                subtest(make_param_sets(), *argnames)(f)(*args, **kwargs)
            return wrapped
    return decorator
def create_empty_dividends_frame():
    """Build a zero-row dividends frame with the canonical column dtypes.

    Columns are ex_date/pay_date/record_date/declared_date (datetime64[ns]),
    amount (float64) and sid (int32), indexed by an empty UTC DatetimeIndex.
    """
    schema = [
        ('ex_date', 'datetime64[ns]'),
        ('pay_date', 'datetime64[ns]'),
        ('record_date', 'datetime64[ns]'),
        ('declared_date', 'datetime64[ns]'),
        ('amount', 'float64'),
        ('sid', 'int32'),
    ]
    empty_index = pd.DatetimeIndex([], tz='UTC')
    return pd.DataFrame(np.array([], dtype=schema), index=empty_index)
def create_empty_splits_mergers_frame():
    """Build a zero-row splits/mergers frame with the canonical dtypes.

    Columns are effective_date (int64), ratio (float64) and sid (int64),
    indexed by an empty (tz-naive) DatetimeIndex.
    """
    schema = [
        ('effective_date', 'int64'),
        ('ratio', 'float64'),
        ('sid', 'int64'),
    ]
    return pd.DataFrame(
        np.array([], dtype=schema),
        index=pd.DatetimeIndex([]),
    )
def make_alternating_boolean_array(shape, first_value=True):
    """
    Create a 2D numpy array with the given shape containing alternating values
    of False, True, False, True,... along each row and each column.

    Parameters
    ----------
    shape : tuple[int, int]
        Shape of the array to create.  Must be 2-dimensional.
    first_value : bool, optional
        Value of the top-left element.  Default is True.

    Returns
    -------
    np.ndarray
        Boolean checkerboard array of the requested shape.

    Raises
    ------
    ValueError
        If ``shape`` is not 2-dimensional.

    Examples
    --------
    >>> make_alternating_boolean_array((4,4))
    array([[ True, False,  True, False],
           [False,  True, False,  True],
           [ True, False,  True, False],
           [False,  True, False,  True]], dtype=bool)
    >>> make_alternating_boolean_array((4,3), first_value=False)
    array([[False,  True, False],
           [ True, False,  True],
           [False,  True, False],
           [ True, False,  True]], dtype=bool)
    """
    if len(shape) != 2:
        raise ValueError(
            'Shape must be 2-dimensional. Given shape was {}'.format(shape)
        )
    # BUG FIX: ``np.bool`` was deprecated in NumPy 1.20 and removed in 1.24;
    # the builtin ``bool`` denotes the same dtype on every NumPy version.
    alternating = np.empty(shape, dtype=bool)
    for row in alternating:
        row[::2] = first_value
        row[1::2] = not first_value
        # Flip the starting value so each row is offset by one column.
        first_value = not first_value
    return alternating
def make_cascading_boolean_array(shape, first_value=True):
    """
    Create a numpy array with the given shape containing cascading boolean
    values, with `first_value` being the top-left value.

    Parameters
    ----------
    shape : tuple[int, int]
        Shape of the array to create.  Must be 2-dimensional.
    first_value : bool, optional
        Value filling the shrinking upper-left triangle.  Default is True.

    Returns
    -------
    np.ndarray
        Boolean array whose run of ``first_value`` shrinks by one column per
        row until it disappears.

    Raises
    ------
    ValueError
        If ``shape`` is not 2-dimensional.

    Examples
    --------
    >>> make_cascading_boolean_array((4,4))
    array([[ True,  True,  True, False],
           [ True,  True, False, False],
           [ True, False, False, False],
           [False, False, False, False]], dtype=bool)
    >>> make_cascading_boolean_array((4,2))
    array([[ True, False],
           [False, False],
           [False, False],
           [False, False]], dtype=bool)
    >>> make_cascading_boolean_array((2,4))
    array([[ True,  True,  True, False],
           [ True,  True, False, False]], dtype=bool)
    """
    if len(shape) != 2:
        raise ValueError(
            'Shape must be 2-dimensional. Given shape was {}'.format(shape)
        )
    # BUG FIX: ``np.bool`` was deprecated in NumPy 1.20 and removed in 1.24;
    # the builtin ``bool`` denotes the same dtype on every NumPy version.
    cascading = np.full(shape, not first_value, dtype=bool)
    ending_col = shape[1] - 1
    for row in cascading:
        if ending_col > 0:
            row[:ending_col] = first_value
            ending_col -= 1
        else:
            # Remaining rows stay entirely ``not first_value``.
            break
    return cascading
@expect_dimensions(array=2)
def permute_rows(seed, array):
    """
    Shuffle each row in ``array`` based on permutations generated by ``seed``.

    Parameters
    ----------
    seed : int
        Seed for numpy.RandomState.
    array : np.ndarray[ndim=2]
        Array over which to apply permutations.

    Returns
    -------
    np.ndarray
        A new array with each row independently permuted.
    """
    rng = np.random.RandomState(seed)
    # Apply the stateful permutation row by row (axis=1) so each row draws
    # the next permutation from the same seeded stream.
    return np.apply_along_axis(rng.permutation, 1, array)
@nottest
def make_test_handler(testcase, *args, **kwargs):
    """
    Returns a TestHandler which will be used by the given testcase. This
    handler can be used to test log messages.

    Parameters
    ----------
    testcase: unittest.TestCase
        The test class in which the log handler will be used.
    *args, **kwargs
        Forwarded to the new TestHandler object.

    Returns
    -------
    handler: logbook.TestHandler
        The handler to use for the test case.
    """
    log_handler = TestHandler(*args, **kwargs)
    # Close the handler when the test finishes, even if it fails.
    testcase.addCleanup(log_handler.close)
    return log_handler
def write_compressed(path, content):
    """
    Write a compressed (gzipped) file to `path`.

    Parameters
    ----------
    path : str
        Destination file path.
    content : bytes
        Raw bytes to compress and write.
    """
    with gzip.open(path, 'wb') as out_file:
        out_file.write(content)
def read_compressed(path):
    """
    Read and return the decompressed contents of the gzipped file at `path`.

    Parameters
    ----------
    path : str
        Path of the gzip file to read.

    Returns
    -------
    bytes
        The decompressed file contents.
    """
    # DOC FIX: the old docstring said "Write ... from `path`"; this function
    # reads.  Behavior is unchanged.
    with gzip.open(path, 'rb') as f:
        return f.read()
# Absolute path of the repository root, computed as two directories above
# this file.  Used by test_resource_path() below to locate tests/resources.
zipline_git_root = abspath(
    join(realpath(dirname(__file__)), '..', '..'),
)
@nottest
def test_resource_path(*path_parts):
    """Return the absolute path of a file under ``tests/resources``.

    The ``@nottest`` decorator stops test runners from collecting this
    helper despite its ``test_`` prefix.
    """
    resources = os.path.join(zipline_git_root, 'tests', 'resources')
    return os.path.join(resources, *path_parts)
@contextmanager
def patch_os_environment(remove=None, **values):
    """
    Context manager for patching the operating system environment.

    Parameters
    ----------
    remove : iterable[str], optional
        Names of environment variables to unset for the duration of the
        context.  Each must currently be set (``os.environ.pop`` raises
        KeyError otherwise, matching the original behavior).
    **values
        Environment variables to set for the duration of the context.

    On exit, every touched variable is restored to its previous value, or
    removed if it was not previously set.
    """
    old_values = {}
    remove = remove or []
    for key in remove:
        old_values[key] = os.environ.pop(key)
    # BUG FIX: ``.iteritems()`` is Python-2-only; ``.items()`` works on both.
    for key, value in values.items():
        old_values[key] = os.getenv(key)
        os.environ[key] = value
    try:
        yield
    finally:
        for old_key, old_value in old_values.items():
            if old_value is None:
                # Variable was not present when we entered, so remove it if
                # it's still present.
                # BUG FIX: previously deleted ``os.environ[key]`` -- the
                # stale loop variable from above -- instead of ``old_key``.
                os.environ.pop(old_key, None)
            else:
                # Restore the old value.
                os.environ[old_key] = old_value
class tmp_dir(TempDirectory, object):
    """New-style class wrapper for TempDirectory under Python 2.

    Mixing in ``object`` makes ``super()`` and other new-style features work
    for subclasses even when TempDirectory is an old-style class on Python 2.
    On Python 3 this is a no-op.
    """
    pass
class _TmpBarReader(with_metaclass(ABCMeta, tmp_dir)):
    """A helper for tmp_bcolz_equity_minute_bar_reader and
    tmp_bcolz_equity_daily_bar_reader.

    Concrete subclasses are context managers: ``__enter__`` creates the temp
    directory, writes ``data`` into it via ``_write``, and returns an instance
    of ``_reader_cls`` over the written directory.

    Parameters
    ----------
    env : TradingEnvironment
        The trading env.
    days : pd.DatetimeIndex
        The days to write for.
    data : dict[int -> pd.DataFrame]
        The data to write.
    path : str, optional
        The path to the directory to write the data into. If not given, this
        will be a unique name.
    """
    @abstractproperty
    def _reader_cls(self):
        # Reader class constructed over the written directory by __enter__.
        raise NotImplementedError('_reader')

    @abstractmethod
    def _write(self, env, days, path, data):
        # Write ``data`` for ``days`` into the directory at ``path``.
        raise NotImplementedError('_write')

    def __init__(self, env, days, data, path=None):
        super(_TmpBarReader, self).__init__(path=path)
        self._env = env
        self._days = days
        self._data = data

    def __enter__(self):
        # Create the temp directory first, then write into it.
        tmpdir = super(_TmpBarReader, self).__enter__()
        env = self._env
        try:
            self._write(
                env,
                self._days,
                tmpdir.path,
                self._data,
            )
            return self._reader_cls(tmpdir.path)
        # Bare except is deliberate: tear the temp directory down on ANY
        # failure (including KeyboardInterrupt), then re-raise.
        except:
            self.__exit__(None, None, None)
            raise
class tmp_bcolz_equity_minute_bar_reader(_TmpBarReader):
    """A temporary BcolzMinuteBarReader object.

    Use as a context manager: entering writes ``data`` into a temp directory
    and yields a BcolzMinuteBarReader over it; exiting removes the directory.

    Parameters
    ----------
    env : TradingEnvironment
        The trading env.
    days : pd.DatetimeIndex
        The days to write for.
    data : iterable[(int, pd.DataFrame)]
        The data to write.
    path : str, optional
        The path to the directory to write the data into. If not given, this
        will be a unique name.

    See Also
    --------
    tmp_bcolz_equity_daily_bar_reader
    """
    # Reader/writer pair consumed by the _TmpBarReader machinery.
    _reader_cls = BcolzMinuteBarReader
    _write = staticmethod(write_bcolz_minute_data)
class tmp_bcolz_equity_daily_bar_reader(_TmpBarReader):
    """A temporary BcolzDailyBarReader object.

    Use as a context manager: entering writes ``data`` into a temp directory
    and yields a BcolzDailyBarReader over it; exiting removes the directory.

    Parameters
    ----------
    env : TradingEnvironment
        The trading env.
    days : pd.DatetimeIndex
        The days to write for.
    data : dict[int -> pd.DataFrame]
        The data to write.
    path : str, optional
        The path to the directory to write the data into. If not given, this
        will be a unique name.

    See Also
    --------
    tmp_bcolz_equity_minute_bar_reader
    """
    _reader_cls = BcolzDailyBarReader

    @staticmethod
    def _write(env, days, path, data):
        # env is unused here but kept for signature parity with the minute
        # writer.
        BcolzDailyBarWriter(path, days).write(data)
@contextmanager
def patch_read_csv(url_map, module=pd, strict=False):
    """Patch pandas.read_csv to map lookups from url to another.

    Parameters
    ----------
    url_map : mapping[str or file-like object -> str or file-like object]
        The mapping to use to redirect read_csv calls.
    module : module, optional
        The module to patch ``read_csv`` on. By default this is ``pandas``.
        This should be set to another module if ``read_csv`` is early-bound
        like ``from pandas import read_csv`` instead of late-bound like:
        ``import pandas as pd; pd.read_csv``.
    strict : bool, optional
        If true, then this will assert that ``read_csv`` is only called with
        elements in the ``url_map``.
    """
    real_read_csv = pd.read_csv

    def patched_read_csv(filepath_or_buffer, *args, **kwargs):
        # Redirect mapped inputs; otherwise pass through (or fail if strict).
        if filepath_or_buffer in url_map:
            return real_read_csv(url_map[filepath_or_buffer], *args, **kwargs)
        if strict:
            raise AssertionError(
                'attempted to call read_csv on %r which not in the url map' %
                filepath_or_buffer,
            )
        return real_read_csv(filepath_or_buffer, *args, **kwargs)

    with patch.object(module, 'read_csv', patched_read_csv):
        yield
def copy_market_data(src_market_data_dir, dest_root_dir):
    """Copy the SPY benchmark and index files from ``src_market_data_dir``
    into ``<dest_root_dir>/data``, creating the destination if needed.
    """
    symbol = 'SPY'
    dest_dir = os.path.join(dest_root_dir, 'data')
    ensure_directory(dest_dir)
    for filename in (get_benchmark_filename(symbol), INDEX_MAPPING[symbol][1]):
        shutil.copyfile(
            os.path.join(src_market_data_dir, filename),
            os.path.join(dest_dir, filename),
        )
@curry
def ensure_doctest(f, name=None):
    """Ensure that an object gets doctested. This is useful for instances
    of objects like curry or partial which are not discovered by default.

    Parameters
    ----------
    f : any
        The thing to doctest.
    name : str, optional
        The name to use in the doctest function mapping. If this is None,
        Then ``f.__name__`` will be used.

    Returns
    -------
    f : any
        ``f`` unchanged.
    """
    key = name if name is not None else f.__name__
    # _getframe(2): skip this frame and the curry wrapper to reach the
    # caller's module globals.  Depth must not change.
    registry = _getframe(2).f_globals.setdefault('__test__', {})
    registry[key] = f
    return f
class RecordBatchBlotter(Blotter):
    """Blotter that tracks how its batch_order method was called.

    Each call's ``(args, kwargs)`` pair is appended to
    ``self.order_batch_called`` before delegating to the base class.
    """
    def __init__(self, data_frequency):
        super(RecordBatchBlotter, self).__init__(data_frequency)
        self.order_batch_called = []

    def batch_order(self, *args, **kwargs):
        call = (args, kwargs)
        self.order_batch_called.append(call)
        return super(RecordBatchBlotter, self).batch_order(*args, **kwargs)
####################################
# Shared factors for pipeline tests.
####################################
class AssetID(CustomFactor):
    """
    CustomFactor that returns the AssetID of each asset.

    Useful for providing a Factor that produces a different value for each
    asset.
    """
    # Single-day window and no pricing inputs are needed.
    window_length = 1
    inputs = ()

    def compute(self, today, assets, out):
        # ``assets`` is the array of asset ids for the computed columns;
        # copy it straight into the output.
        out[:] = assets
class AssetIDPlusDay(CustomFactor):
    """
    CustomFactor producing each asset's id plus the day-of-month of the
    compute date, so values vary across both assets and days.
    """
    window_length = 1
    inputs = ()

    def compute(self, today, assets, out):
        out[:] = assets + today.day
class OpenPrice(CustomFactor):
    """
    CustomFactor forwarding each asset's value from the single-day window of
    ``USEquityPricing.open``.
    """
    window_length = 1
    inputs = [USEquityPricing.open]

    # NOTE: the ``open`` parameter intentionally shadows the builtin; its
    # name must match the input column.
    def compute(self, today, assets, out, open):
        out[:] = open
| bartosh/zipline | zipline/testing/core.py | Python | apache-2.0 | 47,174 |
/* Copyright (C) 2013-2014 Computer Sciences Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. */
/*
* Copyright (c) 2008-2014 MongoDB, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bson.util;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ConcurrentMap;
import static java.util.Collections.unmodifiableList;
import static org.bson.util.CopyOnWriteMap.newHashMap;
class ClassAncestry {
    /**
     * getAncestry
     *
     * Walks superclass and interface graph, superclasses first, then
     * interfaces, to compute an ancestry list. Supertypes are visited left to
     * right. Duplicates are removed such that no Class will appear in the list
     * before one of its subtypes.
     *
     * Does not need to be synchronized, races are harmless as the Class graph
     * does not change at runtime.
     */
    public static <T> List<Class<?>> getAncestry(Class<T> c) {
        final ConcurrentMap<Class<?>, List<Class<?>>> cache = getClassAncestryCache();
        while (true) {
            List<Class<?>> cachedResult = cache.get(c);
            if (cachedResult != null) {
                return cachedResult;
            }
            // Benign race: concurrent callers may both compute the ancestry,
            // but putIfAbsent keeps only the first result and the next loop
            // iteration returns it from the cache.
            cache.putIfAbsent(c, computeAncestry(c));
        }
    }
    /**
     * computeAncestry, starting with children and going back to parents
     */
    private static List<Class<?>> computeAncestry(Class<?> c) {
        final List<Class<?>> result = new ArrayList<Class<?>>();
        // Object.class is seeded first so that, after the reverse below, it
        // ends up last (most-derived types come first).
        result.add(Object.class);
        computeAncestry(c, result);
        Collections.reverse(result);
        // Defensive copy wrapped unmodifiable: cached lists are shared
        // across threads and must never be mutated.
        return unmodifiableList(new ArrayList<Class<?>>(result));
    }
    private static <T> void computeAncestry(Class<T> c, List<Class<?>> result) {
        if ((c == null) || (c == Object.class)) {
            return;
        }
        // first interfaces (looks backwards but is not)
        Class<?>[] interfaces = c.getInterfaces();
        for (int i = interfaces.length - 1; i >= 0; i--) {
            computeAncestry(interfaces[i], result);
        }
        // next superclass
        computeAncestry(c.getSuperclass(), result);
        // Append only on first sighting so duplicates keep their earliest
        // (least-derived) position before the reverse.
        if (!result.contains(c))
            result.add(c);
    }
    /**
     * classAncestryCache
     */
    private static ConcurrentMap<Class<?>, List<Class<?>>> getClassAncestryCache() {
        return (_ancestryCache);
    }
    private static final ConcurrentMap<Class<?>, List<Class<?>>> _ancestryCache = newHashMap();
}
| ezbake/ezmongo | ezmongo-java-driver/src/main/org/bson/util/ClassAncestry.java | Java | apache-2.0 | 3,530 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.provenance.lineage;
import static java.util.Objects.requireNonNull;
public class FlowFileNode implements LineageNode {

    private final String flowFileUuid;
    private final long creationTime;
    private String clusterNodeIdentifier;

    /**
     * Creates a lineage node for a single FlowFile.
     *
     * @param flowFileUuid the UUID of the FlowFile; must not be null
     * @param flowFileCreationTime the timestamp at which the FlowFile was created
     */
    public FlowFileNode(final String flowFileUuid, final long flowFileCreationTime) {
        this.flowFileUuid = requireNonNull(flowFileUuid);
        this.creationTime = flowFileCreationTime;
    }

    @Override
    public String getIdentifier() {
        // The FlowFile UUID doubles as this node's identifier.
        return flowFileUuid;
    }

    @Override
    public long getTimestamp() {
        return creationTime;
    }

    @Override
    public String getClusterNodeIdentifier() {
        return clusterNodeIdentifier;
    }

    @Override
    public LineageNodeType getNodeType() {
        return LineageNodeType.FLOWFILE_NODE;
    }

    @Override
    public String getFlowFileUuid() {
        return flowFileUuid;
    }

    @Override
    public int hashCode() {
        // Arbitrary offset distinguishes this hash from a bare String hash.
        return 23498723 + flowFileUuid.hashCode();
    }

    @Override
    public boolean equals(final Object obj) {
        // Equality is defined solely by the FlowFile UUID; the instanceof
        // check also rejects null.
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof FlowFileNode)) {
            return false;
        }
        return flowFileUuid.equals(((FlowFileNode) obj).flowFileUuid);
    }

    @Override
    public String toString() {
        return "FlowFile[UUID=" + flowFileUuid + "]";
    }
}
| rdblue/incubator-nifi | commons/data-provenance-utils/src/main/java/org/apache/nifi/provenance/lineage/FlowFileNode.java | Java | apache-2.0 | 2,335 |
// Environment detection: decide whether we are running under Rhino, Node.js,
// or a browser, then set up the `less` and `tree` namespaces and record the
// choice in `less.mode` for later branching.
var less, tree;
if (typeof environment === "object" && ({}).toString.call(environment) === "[object Environment]") {
    // Rhino
    // Details on how to detect Rhino: https://github.com/ringo/ringojs/issues/88
    if (typeof(window) === 'undefined') { less = {} }
    else { less = window.less = {} }
    tree = less.tree = {};
    less.mode = 'rhino';
} else if (typeof(window) === 'undefined') {
    // Node.js
    // NOTE: the trailing comma is the comma operator joining the two
    // assignments into one statement; `tree` comes from the sibling module.
    less = exports,
    tree = require('./tree');
    less.mode = 'node';
} else {
    // Browser
    if (typeof(window.less) === 'undefined') { window.less = {} }
    // Comma operator again: both namespaces are attached to `window`.
    less = window.less,
    tree = window.less.tree = {};
    less.mode = 'browser';
}
//
// less.js - parser
//
// A relatively straight-forward predictive parser.
// There is no tokenization/lexing stage, the input is parsed
// in one sweep.
//
// To make the parser fast enough to run in the browser, several
// optimization had to be made:
//
// - Matching and slicing on a huge input is often cause of slowdowns.
// The solution is to chunkify the input into smaller strings.
// The chunks are stored in the `chunks` var,
// `j` holds the current chunk index, and `current` holds
// the index of the current chunk in relation to `input`.
// This gives us an almost 4x speed-up.
//
// - In many cases, we don't need to match individual tokens;
// for example, if a value doesn't hold any variables, operations
// or dynamic references, the parser can effectively 'skip' it,
// treating it as a literal.
// An example would be '1px solid #000' - which evaluates to itself,
// we don't need to know what the individual components are.
// The drawback, of course is that you don't get the benefits of
// syntax-checking on the CSS. This gives us a 50% speed-up in the parser,
// and a smaller speed-up in the code-gen.
//
//
// Token matching is done with the `$` function, which either takes
// a terminal string or regexp, or a non-terminal function to call.
// It also takes care of moving all the indices forwards.
//
//
less.Parser = function Parser(env) {
var input, // LeSS input string
i, // current index in `input`
j, // current chunk
temp, // temporarily holds a chunk's state, for backtracking
memo, // temporarily holds `i`, when backtracking
furthest, // furthest index the parser has gone to
chunks, // chunkified input
current, // index of current chunk, in `input`
parser;
var that = this;
// This function is called after all files
// have been imported through `@import`.
var finish = function () {};
var imports = this.imports = {
paths: env && env.paths || [], // Search paths, when importing
queue: [], // Files which haven't been imported yet
files: {}, // Holds the imported parse trees
contents: {}, // Holds the imported file contents
mime: env && env.mime, // MIME type of .less files
error: null, // Error in parsing/evaluating an import
push: function (path, callback) {
var that = this;
this.queue.push(path);
//
// Import a file asynchronously
//
less.Parser.importer(path, this.paths, function (e, root, contents) {
that.queue.splice(that.queue.indexOf(path), 1); // Remove the path from the queue
var imported = path in that.files;
that.files[path] = root; // Store the root
that.contents[path] = contents;
if (e && !that.error) { that.error = e }
callback(e, root, imported);
if (that.queue.length === 0) { finish() } // Call `finish` if we're done importing
}, env);
}
};
function save() { temp = chunks[j], memo = i, current = i }
function restore() { chunks[j] = temp, i = memo, current = i }
function sync() {
if (i > current) {
chunks[j] = chunks[j].slice(i - current);
current = i;
}
}
//
// Parse from a token, regexp or string, and move forward if match
//
function $(tok) {
var match, args, length, c, index, endIndex, k, mem;
//
// Non-terminal
//
if (tok instanceof Function) {
return tok.call(parser.parsers);
//
// Terminal
//
// Either match a single character in the input,
// or match a regexp in the current chunk (chunk[j]).
//
} else if (typeof(tok) === 'string') {
match = input.charAt(i) === tok ? tok : null;
length = 1;
sync ();
} else {
sync ();
if (match = tok.exec(chunks[j])) {
length = match[0].length;
} else {
return null;
}
}
// The match is confirmed, add the match length to `i`,
// and consume any extra white-space characters (' ' || '\n')
// which come after that. The reason for this is that LeSS's
// grammar is mostly white-space insensitive.
//
if (match) {
mem = i += length;
endIndex = i + chunks[j].length - length;
while (i < endIndex) {
c = input.charCodeAt(i);
if (! (c === 32 || c === 10 || c === 9)) { break }
i++;
}
chunks[j] = chunks[j].slice(length + (i - mem));
current = i;
if (chunks[j].length === 0 && j < chunks.length - 1) { j++ }
if(typeof(match) === 'string') {
return match;
} else {
return match.length === 1 ? match[0] : match;
}
}
}
function expect(arg, msg) {
var result = $(arg);
if (! result) {
error(msg || (typeof(arg) === 'string' ? "expected '" + arg + "' got '" + input.charAt(i) + "'"
: "unexpected token"));
} else {
return result;
}
}
function error(msg, type) {
throw { index: i, type: type || 'Syntax', message: msg };
}
// Same as $(), but don't change the state of the parser,
// just return the match.
function peek(tok) {
if (typeof(tok) === 'string') {
return input.charAt(i) === tok;
} else {
if (tok.test(chunks[j])) {
return true;
} else {
return false;
}
}
}
function basename(pathname) {
if (less.mode === 'node') {
return require('path').basename(pathname);
} else {
return pathname.match(/[^\/]+$/)[0];
}
}
function getInput(e, env) {
if (e.filename && env.filename && (e.filename !== env.filename)) {
return parser.imports.contents[basename(e.filename)];
} else {
return input;
}
}
function getLocation(index, input) {
for (var n = index, column = -1;
n >= 0 && input.charAt(n) !== '\n';
n--) { column++ }
return { line: typeof(index) === 'number' ? (input.slice(0, index).match(/\n/g) || "").length : null,
column: column };
}
function LessError(e, env) {
var input = getInput(e, env),
loc = getLocation(e.index, input),
line = loc.line,
col = loc.column,
lines = input.split('\n');
this.type = e.type || 'Syntax';
this.message = e.message;
this.filename = e.filename || env.filename;
this.index = e.index;
this.line = typeof(line) === 'number' ? line + 1 : null;
this.callLine = e.call && (getLocation(e.call, input).line + 1);
this.callExtract = lines[getLocation(e.call, input).line];
this.stack = e.stack;
this.column = col;
this.extract = [
lines[line - 1],
lines[line],
lines[line + 1]
];
}
this.env = env = env || {};
// The optimization level dictates the thoroughness of the parser,
// the lower the number, the less nodes it will create in the tree.
// This could matter for debugging, or if you want to access
// the individual nodes in the tree.
this.optimization = ('optimization' in this.env) ? this.env.optimization : 1;
this.env.filename = this.env.filename || null;
//
// The Parser
//
return parser = {
imports: imports,
//
// Parse an input string into an abstract syntax tree,
// call `callback` when done.
//
parse: function (str, callback) {
var root, start, end, zone, line, lines, buff = [], c, error = null;
i = j = current = furthest = 0;
input = str.replace(/\r\n/g, '\n');
// Split the input into chunks.
chunks = (function (chunks) {
var j = 0,
skip = /[^"'`\{\}\/\(\)\\]+/g,
comment = /\/\*(?:[^*]|\*+[^\/*])*\*+\/|\/\/.*/g,
string = /"((?:[^"\\\r\n]|\\.)*)"|'((?:[^'\\\r\n]|\\.)*)'|`((?:[^`\\\r\n]|\\.)*)`/g,
level = 0,
match,
chunk = chunks[0],
inParam;
for (var i = 0, c, cc; i < input.length; i++) {
skip.lastIndex = i;
if (match = skip.exec(input)) {
if (match.index === i) {
i += match[0].length;
chunk.push(match[0]);
}
}
c = input.charAt(i);
comment.lastIndex = string.lastIndex = i;
if (match = string.exec(input)) {
if (match.index === i) {
i += match[0].length;
chunk.push(match[0]);
c = input.charAt(i);
}
}
if (!inParam && c === '/') {
cc = input.charAt(i + 1);
if (cc === '/' || cc === '*') {
if (match = comment.exec(input)) {
if (match.index === i) {
i += match[0].length;
chunk.push(match[0]);
c = input.charAt(i);
}
}
}
}
switch (c) {
case '{': if (! inParam) { level ++; chunk.push(c); break }
case '}': if (! inParam) { level --; chunk.push(c); chunks[++j] = chunk = []; break }
case '(': if (! inParam) { inParam = true; chunk.push(c); break }
case ')': if ( inParam) { inParam = false; chunk.push(c); break }
default: chunk.push(c);
}
}
if (level > 0) {
error = new(LessError)({
index: i,
type: 'Parse',
message: "missing closing `}`",
filename: env.filename
}, env);
}
return chunks.map(function (c) { return c.join('') });;
})([[]]);
if (error) {
return callback(error);
}
// Start with the primary rule.
// The whole syntax tree is held under a Ruleset node,
// with the `root` property set to true, so no `{}` are
// output. The callback is called when the input is parsed.
try {
root = new(tree.Ruleset)([], $(this.parsers.primary));
root.root = true;
} catch (e) {
return callback(new(LessError)(e, env));
}
root.toCSS = (function (evaluate) {
var line, lines, column;
return function (options, variables) {
var frames = [], importError;
options = options || {};
//
// Allows setting variables with a hash, so:
//
// `{ color: new(tree.Color)('#f01') }` will become:
//
// new(tree.Rule)('@color',
// new(tree.Value)([
// new(tree.Expression)([
// new(tree.Color)('#f01')
// ])
// ])
// )
//
if (typeof(variables) === 'object' && !Array.isArray(variables)) {
variables = Object.keys(variables).map(function (k) {
var value = variables[k];
if (! (value instanceof tree.Value)) {
if (! (value instanceof tree.Expression)) {
value = new(tree.Expression)([value]);
}
value = new(tree.Value)([value]);
}
return new(tree.Rule)('@' + k, value, false, 0);
});
frames = [new(tree.Ruleset)(null, variables)];
}
try {
var css = evaluate.call(this, { frames: frames })
.toCSS([], { compress: options.compress || false });
} catch (e) {
throw new(LessError)(e, env);
}
if ((importError = parser.imports.error)) { // Check if there was an error during importing
if (importError instanceof LessError) throw importError;
else throw new(LessError)(importError, env);
}
if (options.yuicompress && less.mode === 'node') {
return require('./cssmin').compressor.cssmin(css);
} else if (options.compress) {
return css.replace(/(\s)+/g, "$1");
} else {
return css;
}
};
})(root.eval);
// If `i` is smaller than the `input.length - 1`,
// it means the parser wasn't able to parse the whole
// string, so we've got a parsing error.
//
// We try to extract a \n delimited string,
// showing the line where the parse error occured.
// We split it up into two parts (the part which parsed,
// and the part which didn't), so we can color them differently.
if (i < input.length - 1) {
i = furthest;
lines = input.split('\n');
line = (input.slice(0, i).match(/\n/g) || "").length + 1;
for (var n = i, column = -1; n >= 0 && input.charAt(n) !== '\n'; n--) { column++ }
error = {
type: "Parse",
message: "Syntax Error on line " + line,
index: i,
filename: env.filename,
line: line,
column: column,
extract: [
lines[line - 2],
lines[line - 1],
lines[line]
]
};
}
if (this.imports.queue.length > 0) {
finish = function () { callback(error, root) };
} else {
callback(error, root);
}
},
//
// Here in, the parsing rules/functions
//
// The basic structure of the syntax tree generated is as follows:
//
// Ruleset -> Rule -> Value -> Expression -> Entity
//
// Here's some LESS code:
//
// .class {
// color: #fff;
// border: 1px solid #000;
// width: @w + 4px;
// > .child {...}
// }
//
// And here's what the parse tree might look like:
//
// Ruleset (Selector '.class', [
// Rule ("color", Value ([Expression [Color #fff]]))
// Rule ("border", Value ([Expression [Dimension 1px][Keyword "solid"][Color #000]]))
// Rule ("width", Value ([Expression [Operation "+" [Variable "@w"][Dimension 4px]]]))
// Ruleset (Selector [Element '>', '.child'], [...])
// ])
//
// In general, most rules will try to parse a token with the `$()` function, and if the return
// value is truly, will return a new node, of the relevant type. Sometimes, we need to check
// first, before parsing, that's when we use `peek()`.
//
parsers: {
//
// The `primary` rule is the *entry* and *exit* point of the parser.
// The rules here can appear at any level of the parse tree.
//
// The recursive nature of the grammar is an interplay between the `block`
// rule, which represents `{ ... }`, the `ruleset` rule, and this `primary` rule,
// as represented by this simplified grammar:
//
// primary → (ruleset | rule)+
// ruleset → selector+ block
// block → '{' primary '}'
//
// Only at one point is the primary rule not called from the
// block rule: at the root level.
//
primary: function () {
var node, root = [];
while ((node = $(this.mixin.definition) || $(this.rule) || $(this.ruleset) ||
$(this.mixin.call) || $(this.comment) || $(this.directive))
|| $(/^[\s\n]+/)) {
node && root.push(node);
}
return root;
},
// We create a Comment node for CSS comments `/* */`,
// but keep the LeSS comments `//` silent, by just skipping
// over them.
comment: function () {
var comment;
if (input.charAt(i) !== '/') return;
if (input.charAt(i + 1) === '/') {
return new(tree.Comment)($(/^\/\/.*/), true);
} else if (comment = $(/^\/\*(?:[^*]|\*+[^\/*])*\*+\/\n?/)) {
return new(tree.Comment)(comment);
}
},
//
// Entities are tokens which can be found inside an Expression
//
entities: {
//
// A string, which supports escaping " and '
//
// "milky way" 'he\'s the one!'
//
quoted: function () {
var str, j = i, e;
if (input.charAt(j) === '~') { j++, e = true } // Escaped strings
if (input.charAt(j) !== '"' && input.charAt(j) !== "'") return;
e && $('~');
if (str = $(/^"((?:[^"\\\r\n]|\\.)*)"|'((?:[^'\\\r\n]|\\.)*)'/)) {
return new(tree.Quoted)(str[0], str[1] || str[2], e);
}
},
//
// A catch-all word, such as:
//
// black border-collapse
//
keyword: function () {
var k;
if (k = $(/^[_A-Za-z-][_A-Za-z0-9-]*/)) {
if (tree.colors.hasOwnProperty(k)) {
// detect named color
return new(tree.Color)(tree.colors[k].slice(1));
} else {
return new(tree.Keyword)(k);
}
}
},
//
// A function call
//
// rgb(255, 0, 255)
//
// We also try to catch IE's `alpha()`, but let the `alpha` parser
// deal with the details.
//
// The arguments are parsed with the `entities.arguments` parser.
//
call: function () {
var name, args, index = i;
if (! (name = /^([\w-]+|%|progid:[\w\.]+)\(/.exec(chunks[j]))) return;
name = name[1].toLowerCase();
if (name === 'url') { return null }
else { i += name.length }
if (name === 'alpha') { return $(this.alpha) }
$('('); // Parse the '(' and consume whitespace.
args = $(this.entities.arguments);
if (! $(')')) return;
if (name) { return new(tree.Call)(name, args, index, env.filename) }
},
arguments: function () {
var args = [], arg;
while (arg = $(this.entities.assignment) || $(this.expression)) {
args.push(arg);
if (! $(',')) { break }
}
return args;
},
// A literal entity: a dimension, a color or a quoted string.
// (NOTE(review): `$` appears to be the parser's match/consume primitive
// defined outside this excerpt; it returns the matched node/string or
// undefined — confirmed only by usage in this chunk.)
literal: function () {
    return $(this.entities.dimension) ||
    $(this.entities.color) ||
    $(this.entities.quoted);
},
// Assignments are argument entities for calls.
// They are present in ie filter properties as shown below.
//
//     filter: progid:DXImageTransform.Microsoft.Alpha( *opacity=50* )
//
assignment: function () {
    var key, value;
    // The lookahead (?=\s?=) leaves '=' unconsumed so $('=') can match it.
    if ((key = $(/^\w+(?=\s?=)/i)) && $('=') && (value = $(this.entity))) {
        return new(tree.Assignment)(key, value);
    }
},
//
// Parse url() tokens
//
// We use a specific rule for urls, because they don't really behave like
// standard function calls. The difference is that the argument doesn't have
// to be enclosed within a string, so it can't be parsed as an Expression.
//
url: function () {
    var value;
    // Cheap first-character check before committing to the regex.
    if (input.charAt(i) !== 'u' || !$(/^url\(/)) return;
    value = $(this.entities.quoted) || $(this.entities.variable) ||
    $(this.entities.dataURI) || $(/^[-\w%@$\/.&=:;#+?~]+/) || "";
    expect(')');
    // Bare strings get wrapped in Anonymous; quoted values, data URIs
    // and variables are passed through as-is.
    return new(tree.URL)((value.value || value.data || value instanceof tree.Variable)
    ? value : new(tree.Anonymous)(value), imports.paths);
},
// data:[<mime>][;charset=...][;base64],<data> — used inside url().
// Only succeeds when a data section is actually present.
dataURI: function () {
    var obj;
    if ($(/^data:/)) {
        obj = {};
        obj.mime = $(/^[^\/]+\/[^,;)]+/) || '';
        obj.charset = $(/^;\s*charset=[^,;)]+/) || '';
        obj.base64 = $(/^;\s*base64/) || '';
        obj.data = $(/^,\s*[^)]+/);
        if (obj.data) { return obj }
    }
},
//
// A Variable entity, such as `@fink`, in
//
//     width: @fink + 2px
//
// We use a different parser for variable definitions,
// see `parsers.variable`.
//
variable: function () {
    var name, index = i;
    // `@@name` is an indirect (variable-named) variable reference.
    if (input.charAt(i) === '@' && (name = $(/^@@?[\w-]+/))) {
        return new(tree.Variable)(name, index, env.filename);
    }
},
//
// A Hexadecimal color
//
//     #4F3C2F
//
// `rgb` and `hsl` colors are parsed through the `entities.call` parser.
//
color: function () {
    var rgb;
    if (input.charAt(i) === '#' && (rgb = $(/^#([a-fA-F0-9]{6}|[a-fA-F0-9]{3})/))) {
        return new(tree.Color)(rgb[1]);
    }
},
//
// A Dimension, that is, a number and a unit
//
//     0.5em 95%
//
dimension: function () {
    var value, c = input.charCodeAt(i);
    // Fast reject on the first char code: only '-' (45), '.' (46) and the
    // digits (48-57) can start a number; '/' (47) is explicitly excluded.
    if ((c > 57 || c < 45) || c === 47) return;
    if (value = $(/^(-?\d*\.?\d+)(px|%|em|rem|pc|ex|in|deg|s|ms|pt|cm|mm|rad|grad|turn)?/)) {
        return new(tree.Dimension)(value[1], value[2]);
    }
},
//
// JavaScript code to be evaluated
//
//     `window.location.href`
//
javascript: function () {
    var str, j = i, e;
    if (input.charAt(j) === '~') { j++, e = true } // Escaped strings
    if (input.charAt(j) !== '`') { return }
    // Consume the '~' we peeked past above.
    e && $('~');
    if (str = $(/^`([^`]*)`/)) {
        return new(tree.JavaScript)(str[1], i, e);
    }
}
},
//
// The variable part of a variable definition. Used in the `rule` parser
//
//     @fink:
//
// Returns the plain name (without the trailing ':'), not a tree node.
variable: function () {
    var name;
    if (input.charAt(i) === '@' && (name = $(/^(@[\w-]+)\s*:/))) { return name[1] }
},
//
// A font size/line-height shorthand
//
//     small/12px
//
// We need to peek first, or we'll match on keywords and dimensions
//
shorthand: function () {
    var a, b;
    if (! peek(/^[@\w.%-]+\/[@\w.-]+/)) return;
    if ((a = $(this.entity)) && $('/') && (b = $(this.entity))) {
        return new(tree.Shorthand)(a, b);
    }
},
//
// Mixins
//
mixin: {
    //
    // A Mixin call, with an optional argument list
    //
    //     #mixins > .square(#fff);
    //     .rounded(4px, black);
    //     .button;
    //
    // The `while` loop is there because mixins can be
    // namespaced, but we only support the child and descendant
    // selector for now.
    //
    call: function () {
        var elements = [], e, c, args, index = i, s = input.charAt(i), important = false;
        if (s !== '.' && s !== '#') { return }
        while (e = $(/^[#.](?:[\w-]|\\(?:[a-fA-F0-9]{1,6} ?|[^a-fA-F0-9]))+/)) {
            elements.push(new(tree.Element)(c, e, i));
            c = $('>');
        }
        // Optional argument list between parentheses.
        $('(') && (args = $(this.entities.arguments)) && $(')');
        if ($(this.important)) {
            important = true;
        }
        // A call must end in ';' or be the last thing in its block.
        if (elements.length > 0 && ($(';') || peek('}'))) {
            return new(tree.mixin.Call)(elements, args || [], index, env.filename, important);
        }
    },
    //
    // A Mixin definition, with a list of parameters
    //
    //     .rounded (@radius: 2px, @color) {
    //        ...
    //     }
    //
    // Until we have a finer grained state-machine, we have to
    // do a look-ahead, to make sure we don't have a mixin call.
    // See the `rule` function for more information.
    //
    // We start by matching `.rounded (`, and then proceed on to
    // the argument list, which has optional default values.
    // We store the parameters in `params`, with a `value` key,
    // if there is a value, such as in the case of `@radius`.
    //
    // Once we've got our params list, and a closing `)`, we parse
    // the `{...}` block.
    //
    definition: function () {
        var name, params = [], match, ruleset, param, value, cond, variadic = false;
        if ((input.charAt(i) !== '.' && input.charAt(i) !== '#') ||
            peek(/^[^{]*(;|})/)) return;
        // save()/restore() checkpoint parser position for backtracking.
        save();
        if (match = $(/^([#.](?:[\w-]|\\(?:[a-fA-F0-9]{1,6} ?|[^a-fA-F0-9]))+)\s*\(/)) {
            name = match[1];
            do {
                if (input.charAt(i) === '.' && $(/^\.{3}/)) {
                    // Bare `...`: the mixin accepts arbitrary extra arguments.
                    variadic = true;
                    break;
                } else if (param = $(this.entities.variable) || $(this.entities.literal)
                    || $(this.entities.keyword)) {
                    // Variable
                    if (param instanceof tree.Variable) {
                        if ($(':')) {
                            // `@radius: 2px` — parameter with a default value.
                            value = expect(this.expression, 'expected expression');
                            params.push({ name: param.name, value: value });
                        } else if ($(/^\.{3}/)) {
                            // `@rest...` — named variadic tail parameter.
                            params.push({ name: param.name, variadic: true });
                            variadic = true;
                            break;
                        } else {
                            params.push({ name: param.name });
                        }
                    } else {
                        // Literal/keyword used by pattern-matching mixins.
                        params.push({ value: param });
                    }
                } else {
                    break;
                }
            } while ($(','))
            expect(')');
            if ($(/^when/)) { // Guard
                cond = expect(this.conditions, 'expected condition');
            }
            ruleset = $(this.block);
            if (ruleset) {
                return new(tree.mixin.Definition)(name, params, ruleset, cond, variadic);
            } else {
                restore();
            }
        }
    }
},
//
// Entities are the smallest recognized token,
// and can be found inside a rule's value.
//
entity: function () {
    return $(this.entities.literal) || $(this.entities.variable) || $(this.entities.url) ||
    $(this.entities.call) || $(this.entities.keyword) || $(this.entities.javascript) ||
    $(this.comment);
},
//
// A Rule terminator. Note that we use `peek()` to check for '}',
// because the `block` rule will be expecting it, but we still need to make sure
// it's there, if ';' was ommitted.
//
end: function () {
    return $(';') || peek('}');
},
//
// IE's alpha function
//
//     alpha(opacity=88)
//
// (NOTE(review): this rule matches from the opening parenthesis on; the
// leading `alpha` word is presumably consumed elsewhere — confirm.)
alpha: function () {
    var value;
    if (! $(/^\(opacity=/i)) return;
    if (value = $(/^\d+/) || $(this.entities.variable)) {
        expect(')');
        return new(tree.Alpha)(value);
    }
},
//
// A Selector Element
//
//     div
//     + h1
//     #socks
//     input[type="text"]
//
// Elements are the building blocks for Selectors,
// they are made out of a `Combinator` (see combinator rule),
// and an element name, such as a tag a class, or `*`.
//
element: function () {
    var e, t, c, v;
    c = $(this.combinator);
    e = $(/^(?:\d+\.\d+|\d+)%/) || $(/^(?:[.#]?|:*)(?:[\w-]|\\(?:[a-fA-F0-9]{1,6} ?|[^a-fA-F0-9]))+/) ||
        $('*') || $(this.attribute) || $(/^\([^)@]+\)/);
    if (! e) {
        // `(@var)` — a parenthesised variable used as a selector element.
        $('(') && (v = $(this.entities.variable)) && $(')') && (e = new(tree.Paren)(v));
    }
    if (e) { return new(tree.Element)(c, e, i) }
    // A lone `&` combinator still yields an element (parent reference).
    if (c.value && c.value.charAt(0) === '&') {
        return new(tree.Element)(c, null, i);
    }
},
//
// Combinators combine elements together, in a Selector.
//
// Because our parser isn't white-space sensitive, special care
// has to be taken, when parsing the descendant combinator, ` `,
// as it's an empty space. We have to check the previous character
// in the input, to see if it's a ` ` character. More info on how
// we deal with this in *combinator.js*.
//
combinator: function () {
    var match, c = input.charAt(i);
    if (c === '>' || c === '+' || c === '~') {
        i++;
        // Swallow the whitespace following the combinator character.
        while (input.charAt(i) === ' ') { i++ }
        return new(tree.Combinator)(c);
    } else if (c === '&') {
        match = '&';
        i++;
        // `& ` (with a space) is distinct from a plain `&`.
        if(input.charAt(i) === ' ') {
            match = '& ';
        }
        while (input.charAt(i) === ' ') { i++ }
        return new(tree.Combinator)(match);
    } else if (input.charAt(i - 1) === ' ') {
        // Previous char was a space: descendant combinator.
        return new(tree.Combinator)(" ");
    } else {
        return new(tree.Combinator)(null);
    }
},
//
// A CSS Selector
//
//     .class > div + h1
//     li a:hover
//
// Selectors are made out of one or more Elements, see above.
//
selector: function () {
    var sel, e, elements = [], c, match;
    // `(entity)` form — a single parenthesised entity as selector.
    if ($('(')) {
        sel = $(this.entity);
        expect(')');
        return new(tree.Selector)([new(tree.Element)('', sel, i)]);
    }
    while (e = $(this.element)) {
        c = input.charAt(i);
        elements.push(e)
        // Stop at block / rule / selector-list delimiters.
        if (c === '{' || c === '}' || c === ';' || c === ',') { break }
    }
    if (elements.length > 0) { return new(tree.Selector)(elements) }
},
// A bare tag name or the universal selector.
tag: function () {
    return $(/^[a-zA-Z][a-zA-Z-]*[0-9]?/) || $('*');
},
// `[attr]`, `[attr=value]`, `[attr~="value"]`, ... Returns the serialized
// attribute selector as a string rather than a tree node.
attribute: function () {
    var attr = '', key, val, op;
    if (! $('[')) return;
    if (key = $(/^[a-zA-Z-]+/) || $(this.entities.quoted)) {
        if ((op = $(/^[|~*$^]?=/)) &&
            (val = $(this.entities.quoted) || $(/^[\w-]+/))) {
            attr = [key, op, val.toCSS ? val.toCSS() : val].join('');
        } else { attr = key }
    }
    if (! $(']')) return;
    if (attr) { return "[" + attr + "]" }
},
//
// The `block` rule is used by `ruleset` and `mixin.definition`.
// It's a wrapper around the `primary` rule, with added `{}`.
//
block: function () {
    var content;
    if ($('{') && (content = $(this.primary)) && $('}')) {
        return content;
    }
},
//
// div, .class, body > p {...}
//
ruleset: function () {
    var selectors = [], s, rules, match;
    // Checkpoint so we can backtrack if this is not actually a ruleset.
    save();
    while (s = $(this.selector)) {
        selectors.push(s);
        $(this.comment);
        if (! $(',')) { break }
        $(this.comment);
    }
    if (selectors.length > 0 && (rules = $(this.block))) {
        return new(tree.Ruleset)(selectors, rules, env.strictImports);
    } else {
        // Backtrack
        furthest = i;
        restore();
    }
},
// A single `property: value;` declaration (or `@var: value;`).
rule: function () {
    var name, value, c = input.charAt(i), important, match;
    save();
    // '.'/'#' start mixins and '&' a nested selector — not a rule.
    if (c === '.' || c === '#' || c === '&') { return }
    if (name = $(this.variable) || $(this.property)) {
        // Fast path: a value with no LESS syntax in it is captured straight
        // from the pre-chunked input as an Anonymous node.
        // (NOTE(review): `chunks`, `j` and `memo` are parser state defined
        // outside this excerpt — confirm against the full parser.)
        if ((name.charAt(0) != '@') && (match = /^([^@+\/'"*`(;{}-]*);/.exec(chunks[j]))) {
            i += match[0].length - 1;
            value = new(tree.Anonymous)(match[1]);
        } else if (name === "font") {
            value = $(this.font);
        } else {
            value = $(this.value);
        }
        important = $(this.important);
        if (value && $(this.end)) {
            return new(tree.Rule)(name, value, important, memo);
        } else {
            furthest = i;
            restore();
        }
    }
},
//
// An @import directive
//
//     @import "lib";
//
// Depending on our environemnt, importing is done differently:
// In the browser, it's an XHR request, in Node, it would be a
// file-system operation. The function used for importing is
// stored in `import`, which we pass to the Import constructor.
//
"import": function () {
    var path, features, index = i;
    // `@import-once` suppresses duplicate imports of the same file.
    var dir = $(/^@import(?:-(once))?\s+/);
    if (dir && (path = $(this.entities.quoted) || $(this.entities.url))) {
        features = $(this.mediaFeatures);
        if ($(';')) {
            return new(tree.Import)(path, imports, features, (dir[1] === 'once'), index);
        }
    }
},
// A single media query, e.g. `screen and (min-width: 768px)`.
mediaFeature: function () {
    var e, p, nodes = [];
    do {
        if (e = $(this.entities.keyword)) {
            nodes.push(e);
        } else if ($('(')) {
            p = $(this.property);
            e = $(this.entity);
            if ($(')')) {
                if (p && e) {
                    // `(property: value)` feature.
                    nodes.push(new(tree.Paren)(new(tree.Rule)(p, e, null, i, true)));
                } else if (e) {
                    // `(value)` feature.
                    nodes.push(new(tree.Paren)(e));
                } else {
                    return null;
                }
            } else { return null }
        }
    } while (e);
    if (nodes.length > 0) {
        return new(tree.Expression)(nodes);
    }
},
// A comma-separated media query list; variables are accepted too.
mediaFeatures: function () {
    var e, features = [];
    do {
        if (e = $(this.mediaFeature)) {
            features.push(e);
            if (! $(',')) { break }
        } else if (e = $(this.entities.variable)) {
            features.push(e);
            if (! $(',')) { break }
        }
    } while (e);
    return features.length > 0 ? features : null;
},
// `@media <features> { ... }`
media: function () {
    var features, rules;
    if ($(/^@media/)) {
        features = $(this.mediaFeatures);
        if (rules = $(this.block)) {
            return new(tree.Media)(rules, features);
        }
    }
},
//
// A CSS Directive
//
//     @charset "utf-8";
//
directive: function () {
    var name, value, rules, types, e, nodes;
    if (input.charAt(i) !== '@') return;
    // @import and @media have dedicated parsers.
    if (value = $(this['import']) || $(this.media)) {
        return value;
    } else if (name = $(/^@page|@keyframes/) || $(/^@(?:-webkit-|-moz-|-o-|-ms-)[a-z0-9-]+/)) {
        // Block directives keep everything up to `{` as their "types" text.
        types = ($(/^[^{]+/) || '').trim();
        if (rules = $(this.block)) {
            return new(tree.Directive)(name + " " + types, rules);
        }
    } else if (name = $(/^@[-a-z]+/)) {
        if (name === '@font-face') {
            if (rules = $(this.block)) {
                return new(tree.Directive)(name, rules);
            }
        } else if ((value = $(this.entity)) && $(';')) {
            return new(tree.Directive)(name, value);
        }
    }
},
// Special-cased `font:` value: space-separated shorthand entities,
// optionally followed by comma-separated expressions (font families).
font: function () {
    var value = [], expression = [], weight, shorthand, font, e;
    while (e = $(this.shorthand) || $(this.entity)) {
        expression.push(e);
    }
    value.push(new(tree.Expression)(expression));
    if ($(',')) {
        while (e = $(this.expression)) {
            value.push(e);
            if (! $(',')) { break }
        }
    }
    return new(tree.Value)(value);
},
//
// A Value is a comma-delimited list of Expressions
//
//     font-family: Baskerville, Georgia, serif;
//
// In a Rule, a Value represents everything after the `:`,
// and before the `;`.
//
value: function () {
    var e, expressions = [], important;
    while (e = $(this.expression)) {
        expressions.push(e);
        if (! $(',')) { break }
    }
    if (expressions.length > 0) {
        return new(tree.Value)(expressions);
    }
},
// `!important` (with optional spaces after '!').
important: function () {
    if (input.charAt(i) === '!') {
        return $(/^! *important/);
    }
},
// A parenthesised sub-expression.
sub: function () {
    var e;
    if ($('(') && (e = $(this.expression)) && $(')')) {
        return e;
    }
},
// Left-associative `*` and `/` chains. The peek guards against
// consuming the start of a `/*` comment as a division sign.
multiplication: function () {
    var m, a, op, operation;
    if (m = $(this.operand)) {
        while (!peek(/^\/\*/) && (op = ($('/') || $('*'))) && (a = $(this.operand))) {
            operation = new(tree.Operation)(op, [operation || m, a]);
        }
        return operation || m;
    }
},
// Left-associative `+` and `-` chains. A sign only counts as an operator
// when followed by whitespace, or when not preceded by one (so `10 -5`
// stays two separate values).
addition: function () {
    var m, a, op, operation;
    if (m = $(this.multiplication)) {
        while ((op = $(/^[-+]\s+/) || (input.charAt(i - 1) != ' ' && ($('+') || $('-')))) &&
               (a = $(this.multiplication))) {
            operation = new(tree.Operation)(op, [operation || m, a]);
        }
        return operation || m;
    }
},
// Comma-separated guard conditions combine with logical OR.
conditions: function () {
    var a, b, index = i, condition;
    if (a = $(this.condition)) {
        while ($(',') && (b = $(this.condition))) {
            condition = new(tree.Condition)('or', condition || a, b, index);
        }
        return condition || a;
    }
},
// A single mixin-guard condition: `not? ( a (op b)? )`, optionally
// chained with `and <condition>`.
condition: function () {
    var a, b, c, op, index = i, negate = false;
    if ($(/^not/)) { negate = true }
    expect('(');
    if (a = $(this.addition) || $(this.entities.keyword) || $(this.entities.quoted)) {
        if (op = $(/^(?:>=|=<|[<=>])/)) {
            if (b = $(this.addition) || $(this.entities.keyword) || $(this.entities.quoted)) {
                c = new(tree.Condition)(op, a, b, index, negate);
            } else {
                error('expected expression');
            }
        } else {
            // Bare value: treated as `<value> = true`.
            c = new(tree.Condition)('=', a, new(tree.Keyword)('true'), index, negate);
        }
        expect(')');
        return $(/^and/) ? new(tree.Condition)('and', c, $(this.condition)) : c;
    }
},
//
// An operand is anything that can be part of an operation,
// such as a Color, or a Variable
//
operand: function () {
    var negate, p = input.charAt(i + 1);
    // Unary minus before '@' or '(' — implemented as multiplication by -1.
    if (input.charAt(i) === '-' && (p === '@' || p === '(')) { negate = $('-') }
    var o = $(this.sub) || $(this.entities.dimension) ||
            $(this.entities.color) || $(this.entities.variable) ||
            $(this.entities.call);
    return negate ? new(tree.Operation)('*', [new(tree.Dimension)(-1), o])
                  : o;
},
//
// Expressions either represent mathematical operations,
// or white-space delimited Entities.
//
//     1px solid black
//     @var * 2
//
expression: function () {
    var e, delim, entities = [], d;
    while (e = $(this.addition) || $(this.entity)) {
        entities.push(e);
    }
    if (entities.length > 0) {
        return new(tree.Expression)(entities);
    }
},
// A property name followed by ':' — returns just the name.
property: function () {
    var name;
    if (name = $(/^(\*?-?[-a-z_0-9]+)\s*:/)) {
        return name[1];
    }
}
}
};
};
if (less.mode === 'browser' || less.mode === 'rhino') {
    //
    // Used by `@import` directives
    //
    // Resolves `path` against the first search path (unless it is already
    // absolute or protocol-qualified), then loads the stylesheet.
    less.Parser.importer = function (path, paths, callback, env) {
        if (!/^([a-z]+:)?\//.test(path) && paths.length > 0) {
            path = paths[0] + path;
        }
        // We pass `true` as 3rd argument, to force the reload of the import.
        // This is so we can get the syntax tree as opposed to just the CSS output,
        // as we need this to evaluate the current stylesheet.
        loadStyleSheet({ href: path, title: path, type: env.mime }, function (e) {
            if (e && typeof(env.errback) === "function") {
                // Delegate load failures to the caller-supplied errback.
                env.errback.call(null, path, paths, callback, env);
            } else {
                callback.apply(null, arguments);
            }
        }, true);
    };
}
| qvuilliot/less.js | lib/less/parser.js | JavaScript | apache-2.0 | 49,427 |
package zoara.sfs2x.extension;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Arrays;
import zoara.sfs2x.extension.simulation.World;
import zoara.sfs2x.extension.utils.RoomHelper;
import com.smartfoxserver.bitswarm.sessions.ISession;
import com.smartfoxserver.v2.core.ISFSEvent;
import com.smartfoxserver.v2.core.SFSEventParam;
import com.smartfoxserver.v2.db.IDBManager;
import com.smartfoxserver.v2.exceptions.SFSErrorCode;
import com.smartfoxserver.v2.exceptions.SFSErrorData;
import com.smartfoxserver.v2.exceptions.SFSException;
import com.smartfoxserver.v2.exceptions.SFSLoginException;
import com.smartfoxserver.v2.extensions.BaseServerEventHandler;
import com.smartfoxserver.v2.security.DefaultPermissionProfile;
public class LoginEventHandler extends BaseServerEventHandler
{
@Override
public void handleServerEvent(ISFSEvent event) throws SFSException
{
// Grab parameters from client request
String userName = (String) event.getParameter(SFSEventParam.LOGIN_NAME);
String cryptedPass = (String) event.getParameter(SFSEventParam.LOGIN_PASSWORD);
ISession session = (ISession) event.getParameter(SFSEventParam.SESSION);
// Get password from DB
IDBManager dbManager = getParentExtension().getParentZone().getDBManager();
Connection connection;
try
{
// Grab a connection from the DBManager connection pool
connection = dbManager.getConnection();
// Build a prepared statement
PreparedStatement stmt = connection.prepareStatement(
"SELECT Password, ID, ClanID, Zone FROM player_info WHERE Username = ?"
);
stmt.setString(1, userName);
// Execute query
ResultSet res = stmt.executeQuery();
// Verify that one record was found
if (!res.first())
{
// This is the part that goes to the client
SFSErrorData errData = new SFSErrorData(SFSErrorCode.LOGIN_BAD_USERNAME);
errData.addParameter(userName);
// This is logged on the server side
throw new SFSLoginException("Bad username: " + userName, errData);
}
String dbpassword = res.getString("Password");
int dbId = res.getInt("ID");
//String zone = res.getString("Zone");
int clanId = res.getInt("ClanID");
String zone = res.getString("Zone");
// Return connection to the DBManager connection pool
connection.close();
String thisZone = getParentExtension().getParentZone().getName();
if ((zone.equals("Adult") && !zone.equals(thisZone)) ||
(!zone.equals("Adult") && thisZone.equals("Adult")))
{
SFSErrorData data = new SFSErrorData(SFSErrorCode.JOIN_GAME_ACCESS_DENIED);
data.addParameter(thisZone);
throw new SFSLoginException("Login failed. User " + userName +
" is not a member of Server " + thisZone, data);
}
World world = RoomHelper.getWorld(this);
if (world.hasPlayer(userName))
{
SFSErrorData data = new SFSErrorData(SFSErrorCode.LOGIN_ALREADY_LOGGED);
String[] params = { userName, thisZone };
data.setParams(Arrays.asList(params));
throw new SFSLoginException("Login failed: " + userName +
" is already logged in!", data);
}
// Verify the secure password
if (!getApi().checkSecurePassword(session, dbpassword, cryptedPass))
{
if (dbId < 10)
{
trace("Passwords did not match, but logging in anyway.");
}
else
{
SFSErrorData data = new SFSErrorData(SFSErrorCode.LOGIN_BAD_PASSWORD);
data.addParameter(userName);
throw new SFSLoginException("Login failed for user: " + userName, data);
}
}
// Store the client dbId in the session
session.setProperty(ZoaraExtension.DATABASE_ID, dbId);
if (clanId != 0) {
session.setProperty(ZoaraExtension.CLAN_ID, clanId);
}
session.setProperty("$permission", DefaultPermissionProfile.STANDARD);
}
catch (SQLException e) // User name was not found
{
SFSErrorData errData = new SFSErrorData(SFSErrorCode.GENERIC_ERROR);
errData.addParameter("SQL Error: " + e.getMessage());
throw new SFSLoginException("A SQL Error occurred: " + e.getMessage(), errData);
}
}
}
| xiehan/zoara-server | src/zoara/sfs2x/extension/LoginEventHandler.java | Java | apache-2.0 | 4,312 |
package esilatest

// GetCorporationsCorporationIdFwStatsKills summarizes kills done by the
// given corporation against enemy factions.
type GetCorporationsCorporationIdFwStatsKills struct {
	// LastWeek is last week's total number of kills by members of the given
	// corporation against enemy factions.
	LastWeek int32 `json:"last_week,omitempty"`
	// Total is the total number of kills by members of the given corporation
	// against enemy factions since the corporation enlisted.
	Total int32 `json:"total,omitempty"`
	// Yesterday is yesterday's total number of kills by members of the given
	// corporation against enemy factions.
	Yesterday int32 `json:"yesterday,omitempty"`
}
| antihax/mock-esi | latest/go/model_get_corporations_corporation_id_fw_stats_kills.go | GO | apache-2.0 | 606 |
using FinesSE.Contracts.Infrastructure;
using FinesSE.Contracts.Invokable;
using FinesSE.Core.WebDriver;
namespace FinesSE.Outil.Actions
{
public class Focus : IVoidAction
{
public IExecutionContext Context { get; set; }
[EntryPoint]
public void Invoke(LocatedElements elements)
=> elements
.ConstraintCount(c => c == 1)
.Elements
.ForEach(x => x.SendKeys(""));
}
}
| MirekVales/FinesSE | FinesSE/FinesSE.Outil/Actions/Focus.cs | C# | apache-2.0 | 455 |
import socket
import re
from xii import error, util
# sample validator
# keys = Dict(
# [
# RequiredKey("foo", String(), desc="A string to manipulate something"),
# Key("bar", String(), desc="something usefull")
# ],
# desc="Implement this stuff as you want"
# )
class Validator():
    """Base class for all validators.

    Holds an optional example value and an optional human-readable
    description; ``structure`` returns whichever one an accessor asks for.
    """

    def __init__(self, example=None, description=None):
        self._example = example
        self._description = description

    def structure(self, accessor):
        """Return the example for accessor "example", else the description."""
        return self._example if accessor == "example" else self._description
class TypeCheck(Validator):
    """Validator that checks a value against ``want_type``.

    Subclasses set ``want_type`` (the Python type to accept) and ``want``
    (the human-readable name used for descriptions and error messages).
    """

    want_type = None
    want = "none"

    def __init__(self, example, desc=None):
        # Fall back to the type name when no description is given.
        if desc is None:
            desc = self.want
        Validator.__init__(self, example, desc)

    def validate(self, pre, structure):
        """Return True when structure is a ``want_type``; raise otherwise.

        ``pre`` is the breadcrumb prefix used in error messages.
        """
        if isinstance(structure, self.want_type):
            return True
        # The original had an unreachable `return False` after this raise;
        # it has been removed.
        raise error.ValidatorError("{} needs to be {}".format(pre, self.want))
class Int(TypeCheck):
    """Accepts int values."""
    want = "int"
    want_type = int


class Bool(TypeCheck):
    """Accepts bool values."""
    want = "bool"
    want_type = bool


class String(TypeCheck):
    """Accepts str values."""
    want = "string"
    want_type = str
class Ip(TypeCheck):
    """Accepts strings that parse as an IPv4 or IPv6 address."""

    want = "ip"
    want_type = str

    def validate(self, pre, structure):
        """Raise ValidatorError unless structure is a valid IP literal."""
        TypeCheck.validate(self, pre, structure)
        # Try IPv4 first, then IPv6 — same order as the original checks.
        for family in (socket.AF_INET, socket.AF_INET6):
            try:
                socket.inet_pton(family, structure)
                return True
            except socket.error:
                pass
        raise error.ValidatorError("{} is not a valid IP address".format(pre))
class ByteSize(TypeCheck):
    """Accepts human-readable sizes such as "512M" or "2 G".

    NOTE(review): the pattern is anchored only at the start (``match``),
    so trailing garbage like "5Gfoo" is accepted — confirm intent.
    """
    want = "memory"
    want_type = str

    # <digits><optional spaces><unit k/M/G/T>
    validator = re.compile("(?P<value>\d+)(\ *)(?P<unit>[kMGT])")

    def validate(self, pre, structure):
        TypeCheck.validate(self, pre, structure)

        if self.validator.match(structure):
            return True
        else:
            raise error.ValidatorError("{} is not a valid memory size".format(pre))
class List(TypeCheck):
    """Validates a list whose items each satisfy ``schema``."""

    want = "list"
    want_type = list

    def __init__(self, schema, desc=None):
        # Fix: the original passed `desc` positionally into TypeCheck's
        # `example` slot (TypeCheck.__init__(self, example, desc=None)).
        TypeCheck.__init__(self, None, desc)
        self.schema = schema

    def validate(self, pre, structure):
        """Validate every list item against the schema."""
        TypeCheck.validate(self, pre, structure)

        def _validate_each(item):
            return self.schema.validate(pre, item)

        # One valid item suffices: the original `> 1` wrongly reported
        # one-element lists as invalid (siblings VariableKeys/Dict use >= 1).
        return sum(map(_validate_each, structure)) >= 1

    def structure(self, accessor):
        return [self.schema.structure(accessor)]
class Or(Validator):
    """Matches when at least one schema validates.

    With ``exclusive`` (the default) exactly one alternative must match;
    zero or multiple matches raise a ValidatorError explaining both sides.
    """

    def __init__(self, schemas, desc=None, exclusive=True):
        # Fix: the original passed `desc` positionally into Validator's
        # `example` slot; pass it as the description instead.
        Validator.__init__(self, description=desc)
        self.schemas = schemas
        self.exclusive = exclusive

    def validate(self, pre, structure):
        errors = []

        def _validate_each(schema):
            try:
                return schema.validate(pre, structure)
            except error.ValidatorError as err:
                errors.append(err)
            return False

        state = sum(map(_validate_each, self.schemas))

        if self.exclusive and (state > 1 or state == 0):
            # NOTE(review): if no schema matched *and* none raised,
            # `errors` is empty and next(it) will fail — confirm that
            # every schema either matches or raises.
            def _error_lines():
                it = iter(errors)
                yield " ".join(next(it).error())
                for err in it:
                    yield "or"
                    yield " ".join(err.error())
            raise error.ValidatorError(["{} is ambigous:".format(pre)] +
                                       list(_error_lines()))
        return True

    def structure(self, accessor):
        # Interleave "__or__" between the alternatives' structures.
        desc = []
        descs = [s.structure(accessor) for s in self.schemas]
        for d in descs[:-1]:
            desc.append(d)
            desc.append("__or__")
        desc.append(descs[-1])
        return desc
# Key validators --------------------------------------------------------------

class KeyValidator(Validator):
    """Base for validators bound to one dictionary key.

    Subclasses must set ``self.name`` and ``self.schema``.
    """

    def structure(self, accessor, overwrite=None):
        # `overwrite` lets subclasses substitute a display name (e.g. an
        # example key) for self.name.
        name = self.name
        if overwrite:
            name = overwrite
        return ("{}".format(name), self.schema.structure(accessor))
class VariableKeys(KeyValidator):
    """Validates dicts with arbitrary keys whose values all match ``schema``."""

    def __init__(self, schema, example, desc=None):
        # Fix: the original called KeyValidator.__init__(self, desc, example),
        # swapping the inherited Validator(example, description) argument order.
        KeyValidator.__init__(self, example, desc)
        self.name = "*"
        self.example = example
        self.schema = schema

    def validate(self, pre, structure):
        """Validate every (key, value) pair of the dict against the schema."""
        if not isinstance(structure, dict):
            raise error.ValidatorError("{} needs to be a dict".format(pre))

        def _validate_each(pair):
            (name, next_structure) = pair
            return self.schema.validate(pre + " > " + name, next_structure)

        return sum(map(_validate_each, structure.items())) >= 1

    def structure(self, accessor):
        # For examples, show the example key instead of the "*" placeholder.
        if accessor == "example":
            return KeyValidator.structure(self, accessor, self.example)
        return KeyValidator.structure(self, accessor)
class Key(KeyValidator):
    """Optional dictionary key; validates its value only when present."""

    def __init__(self, name, schema, desc=None, example=None):
        # Fix: the original called KeyValidator.__init__(self, desc, example),
        # swapping the inherited Validator(example, description) argument order.
        KeyValidator.__init__(self, example, desc)
        self.name = name
        self.schema = schema

    def validate(self, pre, structure):
        if not isinstance(structure, dict):
            raise error.ValidatorError("{} needs to be a dict".format(pre))

        value_of_key = util.safe_get(self.name, structure)
        if not value_of_key:
            # Optional key: absence is not an error, it just does not count.
            return False
        return self.schema.validate(pre + " > " + self.name, value_of_key)
class RequiredKey(KeyValidator):
    """Mandatory dictionary key; absence raises a ValidatorError."""

    def __init__(self, name, schema, desc=None, example=None):
        # Fix: the original called Validator.__init__(self, desc, example),
        # swapping the (example, description) argument order.
        Validator.__init__(self, example, desc)
        self.name = name
        self.schema = schema

    def validate(self, pre, structure):
        value_of_key = util.safe_get(self.name, structure)
        if not value_of_key:
            raise error.ValidatorError("{} must have {} "
                                       "defined".format(pre, self.name))
        return self.schema.validate(pre + " > " + self.name, value_of_key)
class Dict(TypeCheck):
    """Validates a dict against a list of key validators."""

    # NOTE(review): "dictonary" is a typo but is user-visible in error
    # messages/descriptions; left unchanged to avoid breaking consumers.
    want = "dictonary"
    want_type = dict

    def __init__(self, schemas, desc=None):
        # Fix: the original passed `desc` positionally into TypeCheck's
        # `example` slot (TypeCheck.__init__(self, example, desc=None)).
        TypeCheck.__init__(self, None, desc)
        self.schemas = schemas

    def validate(self, pre, structure):
        """Run every key validator against the dict."""
        TypeCheck.validate(self, pre, structure)

        def _validate(schema):
            return schema.validate(pre, structure)

        return sum(map(_validate, self.schemas)) >= 1

    def structure(self, accessor):
        # Merge each key validator's (name, structure) pair into one dict.
        desc_dict = {}
        for key, value in [s.structure(accessor) for s in self.schemas]:
            desc_dict[key] = value
        return desc_dict
| xii/xii | src/xii/validator.py | Python | apache-2.0 | 6,612 |
/**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
#include <aws/iotwireless/model/MulticastGroup.h>
#include <aws/core/utils/json/JsonSerializer.h>
#include <utility>
using namespace Aws::Utils::Json;
using namespace Aws::Utils;

namespace Aws
{
namespace IoTWireless
{
namespace Model
{

// Default constructor: all fields start unset.
MulticastGroup::MulticastGroup() : 
    m_idHasBeenSet(false),
    m_arnHasBeenSet(false),
    m_nameHasBeenSet(false)
{
}

// Deserializing constructor: delegates to operator= to populate fields
// from the JSON payload.
MulticastGroup::MulticastGroup(JsonView jsonValue) : 
    m_idHasBeenSet(false),
    m_arnHasBeenSet(false),
    m_nameHasBeenSet(false)
{
  *this = jsonValue;
}

// Copies each field that is present in the JSON view and marks it as set;
// absent fields keep their current values.
MulticastGroup& MulticastGroup::operator =(JsonView jsonValue)
{
  if(jsonValue.ValueExists("Id"))
  {
    m_id = jsonValue.GetString("Id");
    m_idHasBeenSet = true;
  }
  if(jsonValue.ValueExists("Arn"))
  {
    m_arn = jsonValue.GetString("Arn");
    m_arnHasBeenSet = true;
  }
  if(jsonValue.ValueExists("Name"))
  {
    m_name = jsonValue.GetString("Name");
    m_nameHasBeenSet = true;
  }
  return *this;
}

// Serializes only the fields that have been set.
JsonValue MulticastGroup::Jsonize() const
{
  JsonValue payload;
  if(m_idHasBeenSet)
  {
   payload.WithString("Id", m_id);
  }
  if(m_arnHasBeenSet)
  {
   payload.WithString("Arn", m_arn);
  }
  if(m_nameHasBeenSet)
  {
   payload.WithString("Name", m_name);
  }
  return payload;
}

} // namespace Model
} // namespace IoTWireless
} // namespace Aws
| aws/aws-sdk-cpp | aws-cpp-sdk-iotwireless/source/model/MulticastGroup.cpp | C++ | apache-2.0 | 1,434 |
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.DependencyInjection;
namespace webApp
{
public class Startup
{
// This method gets called by the runtime. Use this method to add services to the container.
// For more information on how to configure your application, visit https://go.microsoft.com/fwlink/?LinkID=398940
public void ConfigureServices(IServiceCollection services)
{
services.AddMvc();
}
// This method gets called by the runtime. Use this method to configure the HTTP request pipeline.
public void Configure(IApplicationBuilder app)
{
app.UseMvc(routeBuilder =>
routeBuilder.MapRoute("default", "{controller}/{action}")
);
var defaultFilesOptions = new DefaultFilesOptions();
defaultFilesOptions.DefaultFileNames.Clear();
defaultFilesOptions.DefaultFileNames.Add("index.html");
app.UseDefaultFiles(defaultFilesOptions);
app.UseStaticFiles();
app.Run(async (context) =>
{
await context.Response.SendFileAsync("wwwroot/notfound.html");
});
}
}
}
| Bigsby/NetCore | src/webApp/Startup.cs | C# | apache-2.0 | 1,334 |
# Copyright 2018 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from email.mime import text
import email.utils
import smtplib
import socket
import mailjet_rest
from scoreboard import main
# Module-level application handle, used below for config access and logging.
app = main.get_app()
class MailFailure(Exception):
    """Inability to send mail through the configured provider."""
    pass
def send(message, subject, to, to_name=None, sender=None, sender_name=None):
    """Send an email via the backend named by the MAIL_PROVIDER config.

    Raises:
        MailFailure: when no/an unknown provider is configured, or the
            selected backend fails to deliver the message.
    """
    sender = sender or app.config.get('MAIL_FROM')
    sender_name = sender_name or app.config.get('MAIL_FROM_NAME') or ''
    mail_provider = app.config.get('MAIL_PROVIDER')

    if mail_provider is None:
        app.logger.error('No MAIL_PROVIDER configured!')
        raise MailFailure('No MAIL_PROVIDER configured!')

    if mail_provider == 'smtp':
        _send_smtp(message, subject, to, to_name, sender, sender_name)
        return

    if mail_provider == 'mailjet':
        _send_mailjet(message, subject, to, to_name, sender, sender_name)
        return

    app.logger.error('Invalid MAIL_PROVIDER configured!')
    raise MailFailure('Invalid MAIL_PROVIDER configured!')
def _send_smtp(message, subject, to, to_name, sender, sender_name):
    """SMTP implementation of sending email.

    Raises:
        MailFailure: if the SMTP host is unconfigured/unreachable or the
            server rejects the message.
    """
    host = app.config.get('MAIL_HOST')

    if not host:
        raise MailFailure('SMTP Server Not Configured')

    try:
        server = smtplib.SMTP(host)
    except (smtplib.SMTPConnectError, socket.error) as ex:
        app.logger.error('Unable to send mail: %s', str(ex))
        raise MailFailure('Error connecting to SMTP server.')

    msg = text.MIMEText(message)
    msg['Subject'] = subject
    msg['To'] = email.utils.formataddr((to_name, to))
    msg['From'] = email.utils.formataddr((sender_name, sender))

    try:
        if app.debug:
            server.set_debuglevel(True)
        server.sendmail(sender, [to], msg.as_string())
    except (smtplib.SMTPException, socket.error) as ex:
        app.logger.error('Unable to send mail: %s', str(ex))
        raise MailFailure('Error sending mail to SMTP server.')
    finally:
        # Always try to disconnect; a failed QUIT is not a send failure.
        try:
            server.quit()
        except smtplib.SMTPException:
            pass
def _send_mailjet(message, subject, to, to_name, sender, sender_name):
    """Mailjet implementation of sending email.

    Args:
        message: plain-text body of the mail.
        subject: subject line.
        to/to_name: recipient address and optional display name.
        sender/sender_name: sender address and optional display name.

    Raises:
        MailFailure: if the Mailjet API call fails or reports an error.
    """
    api_key = app.config.get('MJ_APIKEY_PUBLIC')
    api_secret = app.config.get('MJ_APIKEY_PRIVATE')
    if not api_key or not api_secret:
        app.logger.error('Missing MJ_APIKEY_PUBLIC/MJ_APIKEY_PRIVATE!')
        return
    # Note the data structures we use are api v3.1
    client = mailjet_rest.Client(
        auth=(api_key, api_secret),
        api_url='https://api.mailjet.com/',
        version='v3.1')
    from_obj = {
        "Email": sender,
    }
    if sender_name:
        from_obj["Name"] = sender_name
    to_obj = [{
        "Email": to,
    }]
    if to_name:
        to_obj[0]["Name"] = to_name
    # Renamed from `message` — the original shadowed the text-body parameter
    # with this dict, which only worked because the RHS was evaluated first.
    payload = {
        "From": from_obj,
        "To": to_obj,
        "Subject": subject,
        "TextPart": message,
    }
    result = client.send.create(data={'Messages': [payload]})
    if result.status_code != 200:
        app.logger.error(
            'Error sending via mailjet: (%d) %r',
            result.status_code, result.text)
        raise MailFailure('Error sending via mailjet!')
    try:
        j = result.json()
    except Exception:
        app.logger.error('Error sending via mailjet: %r', result.text)
        raise MailFailure('Error sending via mailjet!')
    if j['Messages'][0]['Status'] != 'success':
        app.logger.error('Error sending via mailjet: %r', j)
        raise MailFailure('Error sending via mailjet!')
| google/ctfscoreboard | scoreboard/mail.py | Python | apache-2.0 | 4,180 |
package org.mcxa.vortaro;
import org.junit.Test;
import static org.junit.Assert.*;
/**
* Example local unit test, which will execute on the development machine (host).
*
* @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
*/
public class ExampleUnitTest {
@Test
public void addition_isCorrect() throws Exception {
assertEquals(4, 2 + 2);
}
} | ianmcxa/vortaro | app/src/test/java/org/mcxa/vortaro/ExampleUnitTest.java | Java | apache-2.0 | 394 |
/**
 * This looks at static needs parameter in components and waits for the promise to be fullfilled
 * It is used to make sure server side rendered pages wait for APIs to resolve before
 * returning res.end()
 *
 * As seen in: https://github.com/caljrimmer/isomorphic-redux-app
 */
export function fetchComponentDataBeforeRender(dispatch, components, params) {
  // Gather every `need` thunk declared on a component itself or on the
  // component it wraps (e.g. a redux `connect` wrapper). Each component's
  // needs are placed ahead of the ones accumulated so far, which matches
  // the original reduce/concat ordering.
  const needs = components.reduce((collected, component) => {
    if (!component) {
      return collected;
    }
    const ownNeeds = component.need || [];
    const wrappedNeeds = (component.WrappedComponent && component.WrappedComponent.need) || [];
    return [...ownNeeds, ...wrappedNeeds, ...collected];
  }, []);

  // Dispatch every need and resolve once all of them have settled.
  return Promise.all(needs.map((need) => dispatch(need(params))));
}
| Poniverse/Poniverse.net | app/api/fetchComponentDataBeforeRender.js | JavaScript | apache-2.0 | 710 |
<?php slot('op_sidemenu'); ?>
<?php
// Sidebar part 1: the band's profile image box.
$options = array(
'object' => $band,
);
op_include_parts('memberImageBox', 'bandImageBox', $options);
?>
<?php
// Sidebar part 2: grid of band members; the band admin is marked via crownIds.
$options = array(
'title' => __('%band% Members', array('%band%' => $op_term['band']->titleize())),
'list' => $members,
'crownIds' => array($bandAdmin->getId()),
'link_to' => '@member_profile?id=',
'use_op_link_to_member' => true,
'moreInfo' => array(link_to(sprintf('%s(%d)', __('Show all'), $band->countBandMembers()), '@band_memberList?id='.$band->getId())),
);
// Admins and sub-admins additionally get a link to the member-management page.
if ($isAdmin || $isSubAdmin)
{
$options['moreInfo'][] = link_to(__('Management member'), '@band_memberManage?id='.$band->getId());
}
op_include_parts('nineTable', 'frendList', $options);
?>
<?php end_slot(); ?>
<?php slot('op_top') ?>
<?php if ($isBandPreMember) : ?>
<?php // Show a pending-approval notice to members awaiting admin approval. ?>
<?php op_include_parts('descriptionBox', 'informationAboutBand', array('body' => __('You are waiting for the participation approval by %band%\'s administrator.'))) ?>
<?php endif; ?>
<?php end_slot(); ?>
<div class="parts">
<div class="partsHeading"><h3>Tunes</h3></div>
<table>
<td bgcolor=white>Song</td>
<td bgcolor=white>Aritst</td>
<td bgcolor=white width=40>Link</td>
<td bgcolor=white width=60>Duration</td>
<td bgcolor=white width=136>Event</td>
<?php
// Render one table row per tune. Column order matches the header cells
// above: song name, artist, optional external link, duration, and the
// optional associated event (shown as a linked thumbnail when it has one).
// NOTE(review): "Aritst" in the header above looks like a typo for
// "Artist", and the header cells are not wrapped in a <tr>; left untouched
// here because this file only changes comments, not template output.
foreach($tunes as $tune)
{
echo "<tr>";
echo "<td>" . link_to(nl2br($tune->getTuneName()), "@tune_show?id=".$tune->getId()) . "</td>";
echo "<td>" . nl2br($tune->getArtistName()) . "</td>";
echo "<td>";
// External URL is optional; render the cell empty when absent.
if($tune->getUrl())
{
echo link_to(nl2br("link"), $tune->getUrl(), 'target="_blank"');
}
echo "</td><td>";
echo $tune->getDuration();
echo " min</td>";
echo "</td><td>";
// An event id of 0 means the tune is not attached to any event.
if($tune->getEventId() != 0){
$event = Doctrine::getTable('Event')->find($tune->getEventId());
if($event->getImageFileName()){
echo link_to(op_image_tag_sf_image($event->getImageFileName(), array('size' => '136x50')), '@event_home?id='. $tune->getEventId()) . "<br> ";
}
// echo link_to($event->getName(), '@event_home?id='. $tune->getEventId());
}
echo "</td></tr>";
}
?>
</table></div>
<?php //echo link_to('create tune', url_for('@tune_newForBand?band_id='.$band->getId())); ?>
<BR><BR>
</p>
<?php
// Build the label => value rows for the band-information listBox part.
$list = array(__('%band% Name', array('%band%' => $op_term['band']->titleize())) => $band->getName());
// Official Url
if($band->getOfficialUrl())
{
$official_url = link_to($band->getOfficialUrl(), $band->getOfficialUrl(), 'target="_blank"');
$list += array(__('Official Page') => $official_url );
}
// Member List
if($band->getMemberList())
{
// Strip the <linkoff> marker (and trailing whitespace) before display.
$member_list = nl2br(preg_replace('/<linkoff>(\s)*/i', '', $band->getMemberList()));
$list += array(__('Other Members') => $member_list );
}
if ($band->band_category_id)
{
$list[__('%band% Category', array('%band%' => $op_term['band']->titleize()))] = $band->getBandCategory();
}
$list += array(__('Date Created') => op_format_date($band->getCreatedAt(), 'D'),
__('Administrator') => link_to($bandAdmin->getName(), '@member_profile?id='.$bandAdmin->getId()),
);
// Sub-administrators are rendered as a <ul>; the row is omitted when empty.
$subAdminCaption = '';
foreach ($bandSubAdmins as $m)
{
$subAdminCaption .= "<li>".link_to($m->getName(), '@member_profile?id='.$m->getId())."</li>\n";
}
if ($subAdminCaption)
{
$list[__('Sub Administrator')] = '<ul>'.$subAdminCaption.'</ul>';
}
$list[__('Count of Members')] = $band->countBandMembers();
// Append the band's configurable fields; the description gets URL-command
// expansion and newline-to-<br> conversion, other configs are shown as-is.
foreach ($band->getConfigs() as $key => $config)
{
if ('%band% Description' === $key)
{
$list[__('%band% Description', array('%band%' => $op_term['band']->titleize()), 'form_band')] = op_url_cmd(nl2br($band->getConfig('description')));
}
else
{
$list[__($key, array(), 'form_band')] = $config;
}
}
$list[__('Register policy', array(), 'form_band')] = __($band->getRawValue()->getRegisterPolicy());
$options = array(
'title' => __('%band%', array('%band%' => $op_term['band']->titleize())),
'list' => $list,
);
op_include_parts('listBox', 'bandHome', $options);
?>
<ul>
<?php // Action links depend on the viewer's relationship to the band. ?>
<?php if ($isEditBand): ?>
<li><?php echo link_to(__('Edit this %band%'), '@band_edit?id=' . $band->getId()) ?></li>
<?php endif; ?>
<?php if (!$isAdmin): ?>
<?php if ($isBandMember): ?>
<li><?php echo link_to(__('Leave this %band%'), '@band_quit?id=' . $band->getId()) ?></li>
<?php else : ?>
<li><?php echo link_to(__('Join this %band%'), '@band_join?id=' . $band->getId()) ?></li>
<?php endif; ?>
<?php endif; ?>
</ul>
| sonicdna/sonicdna-dev | plugins/opBandPlugin/apps/pc_frontend/modules/band/templates/homeSuccess.php | PHP | apache-2.0 | 4,421 |
/* eslint-disable no-underscore-dangle, no-param-reassign */
import d3 from 'd3';
require('./sequences.css');
// Modified from http://bl.ocks.org/kerryrodden/7090426
// Modified from http://bl.ocks.org/kerryrodden/7090426
function sunburstVis(slice) {
  const container = d3.select(slice.selector);

  const render = function () {
    // Geometry derived from the current slice size; the breadcrumb trail is
    // given ~8.5% of the height and the sunburst the remainder.
    const margin = { top: 10, right: 5, bottom: 10, left: 5 };
    const containerWidth = slice.width();
    const containerHeight = slice.height();
    const breadcrumbHeight = containerHeight * 0.085;
    const visWidth = containerWidth - margin.left - margin.right;
    const visHeight = containerHeight - margin.top - margin.bottom - breadcrumbHeight;
    const radius = Math.min(visWidth, visHeight) / 2;

    // Total size of all segments; set after the CSV data loads. Declared with
    // `let` because it is reassigned (the original `const` without an
    // initializer was a SyntaxError).
    let totalSize = 0;

    // Dimensions of sunburst.
    const width = visWidth;
    const height = visHeight;

    // Breadcrumb dimensions: width, height, spacing, width of tip/tail.
    const b = {
      w: 75, h: 30, s: 3, t: 10,
    };

    // Mapping of step names to colors.
    const colors = {};

    // Fixed: the original wrote attr('clas', ...), which set a bogus "clas"
    // attribute, so the '.chart', '.explanation' and '.percentage' selectors
    // used below (and the stylesheet) never matched these elements.
    const sequenceDiv = container.append('div:div').attr('class', 'sequence');
    const chartDiv = sequenceDiv.append('div:div').attr('class', 'chart');
    const explanationDiv = chartDiv.append('div:div').attr('class', 'explanation');
    const percentageSPAN = explanationDiv.append('span:span').attr('class', 'percentage');
    percentageSPAN.text('');

    const vis = container.append('svg:svg')
        .attr('width', width)
        .attr('height', height)
      .append('svg:g')
        .attr('class', 'container')
        .attr('transform', 'translate(' + width / 2 + ',' + height / 2 + ')');

    const partition = d3.layout.partition()
      .size([2 * Math.PI, radius * radius])
      .value(function (d) { return d.size; });

    // Fixed: this generator was declared as `arcs` but referenced as `arc`
    // inside createVisualization, which threw a ReferenceError.
    const arc = d3.svg.arc()
      .startAngle(function (d) { return d.x; })
      .endAngle(function (d) { return d.x + d.dx; })
      .innerRadius(function (d) { return Math.sqrt(d.y); })
      .outerRadius(function (d) { return Math.sqrt(d.y + d.dy); });

    // Main function to draw and set up the visualization, once we have the data.
    function createVisualization(json) {
      // Basic setup of page elements.
      initializeBreadcrumbTrail();
      drawLegend();
      d3.select('.togglelegend').on('click', toggleLegend);

      // Bounding circle underneath the sunburst, to make it easier to detect
      // when the mouse leaves the parent g.
      vis.append('svg:circle')
        .attr('r', radius)
        .style('opacity', 0);

      // For efficiency, filter nodes to keep only those large enough to see.
      const nodes = partition.nodes(json)
        .filter(function (d) {
          return (d.dx > 0.005); // 0.005 radians = 0.29 degrees
        });

      const path = vis.data([json]).selectAll('path')
        .data(nodes)
        .enter().append('svg:path')
        .attr('display', function (d) { return d.depth ? null : 'none'; })
        .attr('d', arc)
        .attr('fill-rule', 'evenodd')
        .style('fill', function (d) { return colors[d.name]; })
        .style('opacity', 1)
        .on('mouseover', mouseover);

      // Add the mouseleave handler to the bounding circle.
      d3.select('.container').on('mouseleave', mouseleave);

      // Get total size of the tree = value of root node from partition.
      totalSize = path.node().__data__.value;
    }

    // Use d3.text and d3.csv.parseRows so that we do not need to have a header
    // row, and can receive the csv as an array of arrays.
    d3.text(slice.csvEndpoint(), function (text) {
      const csv = d3.csv.parseRows(text);
      const json = buildHierarchy(csv);
      createVisualization(json);
    });

    // Fade all but the current sequence, and show it in the breadcrumb trail.
    function mouseover(d) {
      const percentage = (100 * d.value / totalSize).toPrecision(3);
      // `let`, not `const`: reassigned below for tiny slices (the original
      // `const` reassignment was a TypeError).
      let percentageString = percentage + '%';
      if (percentage < 0.1) {
        percentageString = '< 0.1%';
      }

      d3.select('.percentage')
        .text(percentageString);
      d3.select('.explanation')
        .style('visibility', '');

      const sequenceArray = getAncestors(d);
      updateBreadcrumbs(sequenceArray, percentageString);

      // Fade all the segments.
      d3.selectAll('path')
        .style('opacity', 0.3);

      // Then highlight only those that are an ancestor of the current segment.
      vis.selectAll('path')
        .filter(function (node) {
          return (sequenceArray.indexOf(node) >= 0);
        })
        .style('opacity', 1);
    }

    // Restore everything to full opacity when moving off the visualization.
    function mouseleave() {
      // Hide the breadcrumb trail
      d3.select('.trail')
        .style('visibility', 'hidden');

      // Deactivate all segments during transition.
      d3.selectAll('path').on('mouseover', null);

      // Transition each segment to full opacity and then reactivate it.
      d3.selectAll('path')
        .transition()
        .duration(1000)
        .style('opacity', 1)
        .each('end', function () {
          d3.select(this).on('mouseover', mouseover);
        });

      d3.select('.explanation')
        .style('visibility', 'hidden');
    }

    // Given a node in a partition layout, return an array of all of its ancestor
    // nodes, highest first, but excluding the root.
    function getAncestors(node) {
      const path = [];
      let current = node;
      while (current.parent) {
        path.unshift(current);
        current = current.parent;
      }
      return path;
    }

    function initializeBreadcrumbTrail() {
      // Add the svg area.
      const trail = d3.select('.sequence').append('svg:svg')
        .attr('width', width)
        .attr('height', 50)
        .attr('class', 'trail');
      // Add the label at the end, for the percentage.
      trail.append('svg:text')
        .attr('class', 'endlabel')
        .style('fill', '#000');
    }

    // Generate a string that describes the points of a breadcrumb polygon.
    function breadcrumbPoints(d, i) {
      const points = [];
      points.push('0,0');
      points.push(b.w + ',0');
      points.push(b.w + b.t + ',' + (b.h / 2));
      points.push(b.w + ',' + b.h);
      points.push('0,' + b.h);
      if (i > 0) { // Leftmost breadcrumb; don't include 6th vertex.
        points.push(b.t + ',' + (b.h / 2));
      }
      return points.join(' ');
    }

    // Update the breadcrumb trail to show the current sequence and percentage.
    function updateBreadcrumbs(nodeArray, percentageString) {
      // Data join; key function combines name and depth (= position in sequence).
      const g = d3.select('.trail')
        .selectAll('g')
        .data(nodeArray, function (d) { return d.name + d.depth; });

      // Add breadcrumb and label for entering nodes.
      const entering = g.enter().append('svg:g');

      entering.append('svg:polygon')
        .attr('points', breadcrumbPoints)
        .style('fill', function (d) { return colors[d.name]; });

      entering.append('svg:text')
        .attr('x', (b.w + b.t) / 2)
        .attr('y', b.h / 2)
        .attr('dy', '0.35em')
        .attr('text-anchor', 'middle')
        .text(function (d) { return d.name; });

      // Set position for entering and updating nodes.
      g.attr('transform', function (d, i) {
        return 'translate(' + i * (b.w + b.s) + ', 0)';
      });

      // Remove exiting nodes.
      g.exit().remove();

      // Now move and update the percentage at the end.
      d3.select('.trail').select('.endlabel')
        .attr('x', (nodeArray.length + 0.5) * (b.w + b.s))
        .attr('y', b.h / 2)
        .attr('dy', '0.35em')
        .attr('text-anchor', 'middle')
        .text(percentageString);

      // Make the breadcrumb trail visible, if it's hidden.
      d3.select('.trail')
        .style('visibility', '');
    }

    function drawLegend() {
      // Dimensions of legend item: width, height, spacing, radius of rounded rect.
      const li = {
        w: 75, h: 30, s: 3, r: 3,
      };

      const legend = d3.select('.legend').append('svg:svg')
        .attr('width', li.w)
        .attr('height', d3.keys(colors).length * (li.h + li.s));

      const g = legend.selectAll('g')
        .data(d3.entries(colors))
        .enter().append('svg:g')
        .attr('transform', function (d, i) {
          return 'translate(0,' + i * (li.h + li.s) + ')';
        });

      g.append('svg:rect')
        .attr('rx', li.r)
        .attr('ry', li.r)
        .attr('width', li.w)
        .attr('height', li.h)
        .style('fill', function (d) { return d.value; });

      g.append('svg:text')
        .attr('x', li.w / 2)
        .attr('y', li.h / 2)
        .attr('dy', '0.35em')
        .attr('text-anchor', 'middle')
        .text(function (d) { return d.key; });
    }

    function toggleLegend() {
      const legend = d3.select('.legend');
      if (legend.style('visibility') === 'hidden') {
        legend.style('visibility', '');
      } else {
        legend.style('visibility', 'hidden');
      }
    }

    // Take a 2-column CSV and transform it into a hierarchical structure suitable
    // for a partition layout. The first column is a sequence of step names, from
    // root to leaf, separated by hyphens. The second column is a count of how
    // often that sequence occurred.
    function buildHierarchy(csv) {
      const root = { name: 'root', children: [] };
      // `let` for the index and mutated locals — the original used `const`,
      // which threw on the first `i++`.
      for (let i = 0; i < csv.length; i++) {
        const sequence = csv[i][0];
        const size = +csv[i][1];
        if (isNaN(size)) { // e.g. if this is a header row
          continue;
        }
        const parts = sequence.split('-');
        let currentNode = root;
        for (let j = 0; j < parts.length; j++) {
          const children = currentNode.children;
          const nodeName = parts[j];
          let childNode;
          if (j + 1 < parts.length) {
            // Not yet at the end of the sequence; move down the tree.
            let foundChild = false;
            for (let k = 0; k < children.length; k++) {
              if (children[k].name === nodeName) {
                childNode = children[k];
                foundChild = true;
                break;
              }
            }
            // If we don't already have a child node for this branch, create it.
            if (!foundChild) {
              childNode = { name: nodeName, children: [] };
              children.push(childNode);
            }
            currentNode = childNode;
          } else {
            // Reached the end of the sequence; create a leaf node.
            childNode = { name: nodeName, size };
            children.push(childNode);
          }
        }
      }
      return root;
    }
  };

  return {
    render,
    resize: render,
  };
}

module.exports = sunburstVis;
| jeromecn/caravel_viz_full | caravel/assets/visualizations/sequences.js | JavaScript | apache-2.0 | 11,085 |
package org.plantuml.idea.action;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.ToggleAction;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.fileEditor.FileEditorManager;
import com.intellij.openapi.project.DumbAware;
import com.intellij.openapi.project.Project;
import org.plantuml.idea.preview.PlantUmlPreviewPanel;
import org.plantuml.idea.settings.PlantUmlSettings;
import org.plantuml.idea.util.UIUtils;
/**
 * Toggle action that mirrors the persisted "highlight in images" setting and
 * re-applies highlighting to the current preview when flipped.
 */
public class ImageHighlightToggleAction extends ToggleAction implements DumbAware {
    @Override
    public boolean isSelected(AnActionEvent anActionEvent) {
        return PlantUmlSettings.getInstance().isHighlightInImages();
    }

    @Override
    public void setSelected(AnActionEvent anActionEvent, boolean b) {
        // Persist the new state first so any later refresh reads it.
        PlantUmlSettings.getInstance().setHighlightInImages(b);
        Project project = anActionEvent.getProject();
        if (project == null) {
            // Invoked outside a project context; nothing to refresh.
            return;
        }
        PlantUmlPreviewPanel previewPanel = UIUtils.getEditorOrToolWindowPreview(anActionEvent);
        if (previewPanel == null) {
            // No preview is open; the setting takes effect on the next render.
            return;
        }
        Editor selectedTextEditor = UIUtils.getSelectedTextEditor(FileEditorManager.getInstance(project), null);
        previewPanel.highlightImages(selectedTextEditor);
    }
}
| esteinberg/plantuml4idea | src/org/plantuml/idea/action/ImageHighlightToggleAction.java | Java | apache-2.0 | 1,200 |
/*
* Copyright (C) 2012-2014 Open Source Robotics Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
#include <string>
#include "gazebo/physics/physics.hh"
#include "gazebo/transport/transport.hh"
#include "plugins/SkidSteerDrivePlugin.hh"
using namespace gazebo;
GZ_REGISTER_MODEL_PLUGIN(SkidSteerDrivePlugin)
/////////////////////////////////////////////////
// Initialize tunable parameters to safe defaults. wheelRadius and
// wheelSeparation are computed later in Load() from the model geometry;
// maxForce may be overridden by the <max_force> SDF element.
// Uses a member-initializer list (preferred C++ idiom) instead of
// assignments in the constructor body.
SkidSteerDrivePlugin::SkidSteerDrivePlugin()
  : maxForce(5.0),
    wheelRadius(0.0),
    wheelSeparation(0.0)
{
}
/////////////////////////////////////////////////
// Looks up the named joint on the model and stores it at this->joints[_index].
// Returns 0 on success, 1 on failure (bad index or missing joint).
int SkidSteerDrivePlugin::RegisterJoint(int _index, const std::string &_name)
{
  // Bounds checking on index
  if (_index < 0 || _index >= NUMBER_OF_WHEELS)
  {
    gzerr << "Joint index " << _index << " out of bounds [0, "
          << NUMBER_OF_WHEELS << ") in model " << this->model->GetName()
          << "." << std::endl;
    // Fixed: the original fell through after logging and indexed
    // this->joints[_index] with an invalid index (undefined behavior).
    return 1;
  }

  // Find the specified joint and add it to our list
  this->joints[_index] = this->model->GetJoint(_name);
  if (!this->joints[_index])
  {
    gzerr << "Unable to find the " << _name
          << " joint in model " << this->model->GetName() << "." << std::endl;
    return 1;
  }

  // Success!
  return 0;
}
/////////////////////////////////////////////////
// Called by Gazebo when the model is inserted. Registers the four wheel
// joints, reads the optional <max_force> SDF element, derives wheel
// separation and radius from the model geometry, then subscribes to the
// ~/<model>/vel_cmd topic for velocity commands.
void SkidSteerDrivePlugin::Load(physics::ModelPtr _model,
                                sdf::ElementPtr _sdf)
{
  this->model = _model;
  this->node = transport::NodePtr(new transport::Node());
  this->node->Init(this->model->GetWorld()->GetName());
  // RegisterJoint returns non-zero on failure; abort the load if any of the
  // four expected joints is missing.
  int err = 0;
  err += RegisterJoint(RIGHT_FRONT, "right_front");
  err += RegisterJoint(RIGHT_REAR, "right_rear");
  err += RegisterJoint(LEFT_FRONT, "left_front");
  err += RegisterJoint(LEFT_REAR, "left_rear");
  if (err > 0)
    return;
  if (_sdf->HasElement("max_force"))
    this->maxForce = _sdf->GetElement("max_force")->Get<double>();
  else
    gzwarn << "No MaxForce value set in the model sdf, default value is 5.0.\n";
  // This assumes that front and rear wheel spacing is identical
  this->wheelSeparation = this->joints[RIGHT_FRONT]->GetAnchor(0).Distance(
      this->joints[LEFT_FRONT]->GetAnchor(0));
  // This assumes that the largest dimension of the wheel is the diameter
  // and that all wheels have the same diameter
  physics::EntityPtr wheelLink = boost::dynamic_pointer_cast<physics::Entity>(
      this->joints[RIGHT_FRONT]->GetChild() );
  if (wheelLink)
  {
    math::Box bb = wheelLink->GetBoundingBox();
    this->wheelRadius = bb.GetSize().GetMax() * 0.5;
  }
  // Validity checks...
  // A non-positive separation means the anchors coincide (overlapping links).
  if (this->wheelSeparation <= 0)
  {
    gzerr << "Unable to find the wheel separation distance." << std::endl
          << "  This could mean that the right_front link and the left_front "
          << "link are overlapping." << std::endl;
    return;
  }
  // Radius stays 0 when the wheel link cast above failed.
  if (this->wheelRadius <= 0)
  {
    gzerr << "Unable to find the wheel radius." << std::endl
          << "  This could mean that the sdf is missing a wheel link on "
          << "the right_front joint." << std::endl;
    return;
  }
  this->velSub = this->node->Subscribe(
      std::string("~/") + this->model->GetName() + std::string("/vel_cmd"),
      &SkidSteerDrivePlugin::OnVelMsg, this);
}
/////////////////////////////////////////////////
// Velocity-command callback: converts the requested linear velocity (pose x)
// and yaw rate (pose orientation) into per-wheel angular velocities.
void SkidSteerDrivePlugin::OnVelMsg(ConstPosePtr &_msg)
{
  // Allow every wheel joint to apply up to the configured torque limit.
  for (int wheel = 0; wheel < NUMBER_OF_WHEELS; ++wheel)
    this->joints[wheel]->SetMaxForce(0, this->maxForce);

  const double linearVel = _msg->position().x() / this->wheelRadius;
  const double rotationalVel =
      -1 * msgs::Convert(_msg->orientation()).GetAsEuler().z
      * (this->wheelSeparation / this->wheelRadius);

  // Opposite signs on the two sides produce the skid-steer turn: the right
  // wheels subtract the rotational term, the left wheels add it.
  this->joints[RIGHT_FRONT]->SetVelocity(0, linearVel - rotationalVel);
  this->joints[RIGHT_REAR ]->SetVelocity(0, linearVel - rotationalVel);
  this->joints[LEFT_FRONT ]->SetVelocity(0, linearVel + rotationalVel);
  this->joints[LEFT_REAR  ]->SetVelocity(0, linearVel + rotationalVel);
}
| jonbinney/gazebo_ros_wrapper | plugins/SkidSteerDrivePlugin.cc | C++ | apache-2.0 | 4,572 |
package liquibase.sqlgenerator.core;
import liquibase.change.ColumnConfig;
import liquibase.database.ObjectQuotingStrategy;
import liquibase.database.core.OracleDatabase;
import liquibase.sql.Sql;
import liquibase.statement.DatabaseFunction;
import liquibase.statement.SequenceCurrentValueFunction;
import liquibase.statement.SequenceNextValueFunction;
import liquibase.statement.core.InsertOrUpdateStatement;
import liquibase.statement.core.InsertStatement;
import liquibase.statement.core.UpdateStatement;
import org.junit.Test;
import static org.junit.Assert.*;
/**
 * Tests for the Oracle INSERT-or-UPDATE SQL generator: verifies the generated
 * PL/SQL block line by line, the update-only mode, and schema-qualified
 * sequence functions in INSERT and UPDATE statements.
 */
public class InsertOrUpdateGeneratorOracleTest {
    /**
     * Verifies the full PL/SQL upsert block: a DECLARE/BEGIN wrapper that
     * counts matching rows, INSERTs when none exist and UPDATEs when one does.
     */
    @Test
    public void ContainsInsertStatement() {
        OracleDatabase database = new OracleDatabase();
        InsertOrUpdateGeneratorOracle generator = new InsertOrUpdateGeneratorOracle();
        InsertOrUpdateStatement statement = new InsertOrUpdateStatement("mycatalog", "myschema","mytable","pk_col1");
        statement.addColumnValue("pk_col1","value1");
        statement.addColumnValue("col2","value2");
        // NOTE(review): the two column values are added twice; presumably to
        // check that duplicate adds do not duplicate columns in the output —
        // confirm, or drop the repeated calls.
        statement.addColumnValue("pk_col1","value1");
        statement.addColumnValue("col2","value2");
        Sql[] sql = generator.generateSql( statement, database, null);
        String theSql = sql[0].toSql();
        assertTrue(theSql.contains("INSERT INTO mycatalog.mytable (pk_col1, col2) VALUES ('value1', 'value2');"));
        assertTrue(theSql.contains("UPDATE mycatalog.mytable"));
        // Walk the generated SQL line by line to pin the exact structure.
        String[] sqlLines = theSql.split("\n");
        int lineToCheck = 0;
        assertEquals("DECLARE", sqlLines[lineToCheck].trim());
        lineToCheck++;
        assertEquals("v_reccount NUMBER := 0;", sqlLines[lineToCheck].trim());
        lineToCheck++;
        assertEquals("BEGIN", sqlLines[lineToCheck].trim());
        lineToCheck++;
        assertEquals("SELECT COUNT(*) INTO v_reccount FROM mycatalog.mytable WHERE pk_col1 = 'value1';", sqlLines[lineToCheck].trim());
        lineToCheck++;
        assertEquals("IF v_reccount = 0 THEN", sqlLines[lineToCheck].trim());
        lineToCheck++;
        // No trim() here: the INSERT line is expected without leading whitespace.
        assertEquals("INSERT INTO mycatalog.mytable (pk_col1, col2) VALUES ('value1', 'value2');", sqlLines[lineToCheck]);
        lineToCheck++;
        assertEquals("ELSIF v_reccount = 1 THEN", sqlLines[lineToCheck].trim());
        lineToCheck++;
        assertEquals("UPDATE mycatalog.mytable SET col2 = 'value2' WHERE pk_col1 = 'value1';", sqlLines[lineToCheck].trim());
        lineToCheck++;
        assertEquals("END IF;", sqlLines[lineToCheck].trim());
        lineToCheck++;
        assertEquals("END;", sqlLines[lineToCheck].trim());
        /*
        DECLARE
        v_prodcount NUMBER := 0;
        BEGIN
        -- Check if product with this name already exists
        SELECT COUNT (*)
        INTO v_prodcount
        FROM books WHERE isbn = 12345678;
        -- Product does not exist
        IF v_prodcount = 0 THEN
        -- Insert row into PRODUCT based on arguments passed
        INSERT INTO books
        VALUES
        ( 12345678,
        98765432,
        'Working with Liquibase');
        -- Product with this name already exists
        ELSIF v_prodcount = 1 THEN
        -- Update the existing product with values
        -- passed as arguments
        UPDATE books
        SET author_id = 98765432,
        title = 'Working with liquibase'
        WHERE isbn = 12345678;
        END IF;
        END;*/
    }

    /**
     * With onlyUpdate=true the generator must emit a bare UPDATE statement,
     * with no INSERT and no PL/SQL wrapper.
     */
    @Test
    public void testOnlyUpdateFlag() {
        OracleDatabase database = new OracleDatabase();
        InsertOrUpdateGeneratorOracle generator = new InsertOrUpdateGeneratorOracle();
        InsertOrUpdateStatement statement = new InsertOrUpdateStatement("mycatalog", "myschema", "mytable", "pk_col1", true);
        statement.addColumnValue("pk_col1", "value1");
        statement.addColumnValue("col2", "value2");
        Sql[] sql = generator.generateSql(statement, database, null);
        String theSql = sql[0].toSql();
        assertFalse("should not have had insert statement", theSql.contains("INSERT INTO mycatalog.mytable (pk_col1, col2) VALUES ('value1', 'value2');"));
        assertTrue("missing update statement", theSql.contains("UPDATE mycatalog.mytable"));
        String[] sqlLines = theSql.split("\n");
        int lineToCheck = 0;
        assertEquals("UPDATE mycatalog.mytable SET col2 = 'value2' WHERE pk_col1 = 'value1'", sqlLines[lineToCheck].trim());
        assertEquals("Wrong number of lines", 1, sqlLines.length);
    }

    /**
     * Builds an INSERT for column col3 whose value is the given sequence
     * function (nextval or currval) and returns the generated SQL.
     */
    private String prepareInsertStatement(DatabaseFunction databaseSchemaBasedFunction) {
        OracleDatabase database = new OracleDatabase();
        database.setObjectQuotingStrategy(ObjectQuotingStrategy.LEGACY);
        InsertGenerator generator = new InsertGenerator();
        InsertStatement statement = new InsertStatement("mycatalog", "myschema", "mytable");
        ColumnConfig columnConfig = new ColumnConfig();
        if (databaseSchemaBasedFunction instanceof SequenceNextValueFunction) {
            columnConfig.setValueSequenceNext((SequenceNextValueFunction) databaseSchemaBasedFunction);
        } else if (databaseSchemaBasedFunction instanceof SequenceCurrentValueFunction) {
            columnConfig.setValueSequenceCurrent((SequenceCurrentValueFunction) databaseSchemaBasedFunction);
        }
        columnConfig.setName("col3");
        statement.addColumn(columnConfig);
        Sql[] sql = generator.generateSql(statement, database, null);
        return sql[0].toSql();
    }

    /**
     * Builds an UPDATE setting col3 to the given sequence nextval function
     * and returns the generated SQL.
     */
    private String prepareUpdateStatement(SequenceNextValueFunction sequenceNextValueFunction) {
        OracleDatabase database = new OracleDatabase();
        database.setObjectQuotingStrategy(ObjectQuotingStrategy.LEGACY);
        UpdateGenerator generator = new UpdateGenerator();
        UpdateStatement statement = new UpdateStatement("mycatalog", "myschema", "mytable");
        statement.addNewColumnValue("col3", sequenceNextValueFunction);
        Sql[] sql = generator.generateSql(statement, database, null);
        return sql[0].toSql();
    }

    /** Sequence nextval in an INSERT must keep its schema qualifier. */
    @Test
    public void testInsertSequenceValWithSchema() {
        SequenceNextValueFunction sequenceNext = new SequenceNextValueFunction("myschema", "my_seq");
        assertEquals(
                "INSERT INTO mycatalog.mytable (col3) VALUES (myschema.my_seq.nextval)",
                prepareInsertStatement(sequenceNext));
    }

    /** Same as above via the whole-statement path. */
    @Test
    public void testInsertSequenceValWithSchemaInWholeStatement() {
        SequenceNextValueFunction sequenceNext = new SequenceNextValueFunction("myschema", "my_seq");
        assertEquals(
                "INSERT INTO mycatalog.mytable (col3) VALUES (myschema.my_seq.nextval)",
                prepareInsertStatement(sequenceNext));
    }

    /** Sequence nextval in an UPDATE must keep its schema qualifier. */
    @Test
    public void testUpdateSequenceValWithSchema() {
        SequenceNextValueFunction sequenceNext = new SequenceNextValueFunction("myschema", "my_seq");
        assertEquals(
                "UPDATE mycatalog.mytable SET col3 = myschema.my_seq.nextval",
                prepareUpdateStatement(sequenceNext));
    }

    /** Same as above via the whole-statement path. */
    @Test
    public void testUpdateSequenceValWithSchemaInWholeStatement() {
        SequenceNextValueFunction sequenceNext = new SequenceNextValueFunction("myschema", "my_seq");
        assertEquals(
                "UPDATE mycatalog.mytable SET col3 = myschema.my_seq.nextval",
                prepareUpdateStatement(sequenceNext));
    }
}
| liquibase/liquibase | liquibase-core/src/test/java/liquibase/sqlgenerator/core/InsertOrUpdateGeneratorOracleTest.java | Java | apache-2.0 | 7,276 |
/*
* Copyright 2005-2011 Sixth and Red River Software, Bas Leijdekkers
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sixrr.metrics.ui.dialogs;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.io.IOException;
import java.net.URL;
import javax.swing.Action;
import javax.swing.JComponent;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JTextPane;
import javax.swing.event.HyperlinkEvent;
import javax.swing.event.HyperlinkListener;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.Nullable;
import com.intellij.ide.BrowserUtil;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.ui.HyperlinkLabel;
import com.intellij.ui.ScrollPaneFactory;
import com.sixrr.metrics.Metric;
import com.sixrr.metrics.utils.MetricsReloadedBundle;
/**
 * Modal dialog that displays the HTML description of a metric, loaded from a
 * classpath resource under /metricsDescriptions/, along with an optional
 * "more information" hyperlink provided by the metric itself.
 */
public class ExplanationDialog extends DialogWrapper
{
	private static final Action[] EMPTY_ACTION_ARRAY = new Action[0];

	private final JTextPane textPane = new JTextPane();
	private final HyperlinkLabel urlLabel = new HyperlinkLabel();
	private final JLabel moreInformationLabel = new JLabel(MetricsReloadedBundle.message("for.more.information.go.to"));

	public ExplanationDialog(Project project)
	{
		super(project, false);
		setModal(true);
		init();
		pack();
	}

	/**
	 * Loads the description page for the given metric (falling back to the
	 * "under construction" page when none exists), configures the help link
	 * and shows the dialog.
	 */
	public void run(Metric metric)
	{
		@NonNls final String descriptionName = "/metricsDescriptions/" + metric.getID() + ".html";
		final boolean resourceFound = setDescriptionFromResource(descriptionName, metric);
		if(!resourceFound)
		{
			setDescriptionFromResource("/metricsDescriptions/UnderConstruction.html", metric);
		}
		setTitle(MetricsReloadedBundle.message("explanation.dialog.title", metric.getDisplayName()));
		final String helpString = metric.getHelpDisplayString();
		final String helpURL = metric.getHelpURL();
		// Hide the "more information" row entirely when the metric provides
		// no help link text.
		if(helpString == null)
		{
			urlLabel.setVisible(false);
			moreInformationLabel.setVisible(false);
		}
		else
		{
			urlLabel.setHyperlinkText(helpString);
			urlLabel.setVisible(true);
			moreInformationLabel.setVisible(true);
		}
		urlLabel.addHyperlinkListener(new HyperlinkListener()
		{
			@Override
			public void hyperlinkUpdate(HyperlinkEvent e)
			{
				if(helpURL != null)
				{
					BrowserUtil.launchBrowser("http://" + helpURL);
				}
			}
		});
		show();
	}

	/**
	 * Points the text pane at the named classpath resource.
	 *
	 * @return true if the resource exists and was loaded, false otherwise.
	 */
	private boolean setDescriptionFromResource(@NonNls String resourceName, Metric metric)
	{
		final URL resourceURL = metric.getClass().getResource(resourceName);
		// Fixed: getResource() returns null for a missing resource; the old
		// code passed that null straight to setPage(), which fails with an
		// unchecked exception rather than the IOException handled below.
		if(resourceURL == null)
		{
			return false;
		}
		try
		{
			textPane.setPage(resourceURL);
			return true;
		}
		catch(IOException ignored)
		{
			return false;
		}
	}

	@Override
	@NonNls
	protected String getDimensionServiceKey()
	{
		return "MetricsReloaded.ExplanationDialog";
	}

	@Override
	public Action[] createActions()
	{
		// No OK/Cancel buttons; the dialog is informational only.
		return EMPTY_ACTION_ARRAY;
	}

	@Override
	@Nullable
	protected JComponent createCenterPanel()
	{
		final JPanel panel = new JPanel();
		panel.setLayout(new GridBagLayout());
		final GridBagConstraints constraints = new GridBagConstraints();
		// Description pane fills the available space across both columns.
		constraints.weightx = 1.0;
		constraints.weighty = 1.0;
		constraints.gridwidth = 2;
		constraints.fill = GridBagConstraints.BOTH;
		panel.add(ScrollPaneFactory.createScrollPane(textPane), constraints);
		// Bottom row: "for more information" label plus the hyperlink.
		constraints.gridwidth = 1;
		constraints.weightx = 0.0;
		constraints.weighty = 0.0;
		constraints.gridy = 1;
		panel.add(moreInformationLabel, constraints);
		constraints.gridx = 1;
		constraints.insets.left = 5;
		panel.add(urlLabel, constraints);
		return panel;
	}
}
| consulo/consulo-metrics | src/com/sixrr/metrics/ui/dialogs/ExplanationDialog.java | Java | apache-2.0 | 4,055 |
// Copyright 2015 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package expression
import (
"reflect"
"sync"
. "github.com/pingcap/check"
"github.com/pingcap/tidb/parser/ast"
"github.com/pingcap/tidb/parser/charset"
"github.com/pingcap/tidb/parser/model"
"github.com/pingcap/tidb/parser/mysql"
"github.com/pingcap/tidb/sessionctx"
"github.com/pingcap/tidb/types"
"github.com/pingcap/tidb/util/chunk"
)
// evalBuiltinFuncConcurrent evaluates f from several goroutines at once and
// returns one of the produced results. It is used to smoke-test that builtin
// function implementations are safe under concurrent evaluation (e.g. with
// the race detector). The first recorded error wins; while no error has been
// recorded, a later successful result may overwrite an earlier one.
func evalBuiltinFuncConcurrent(f builtinFunc, row chunk.Row) (d types.Datum, err error) {
	wg := sync.WaitGroup{}
	concurrency := 10
	wg.Add(concurrency)
	// lock guards the shared d and err accumulators across the workers.
	var lock sync.Mutex
	err = nil
	for i := 0; i < concurrency; i++ {
		go func() {
			defer wg.Done()
			// NOTE(review): the row parameter is ignored and an empty
			// chunk.Row is evaluated instead — presumably intentional for
			// constant-only builtins; confirm against callers.
			di, erri := evalBuiltinFunc(f, chunk.Row{})
			lock.Lock()
			if err == nil {
				d, err = di, erri
			}
			lock.Unlock()
		}()
	}
	wg.Wait()
	return
}
// evalBuiltinFunc evaluates f against row using the typed evaluation method
// that matches f's declared return type, then packs the result into a
// types.Datum. A NULL result or an evaluation error yields a null Datum.
func evalBuiltinFunc(f builtinFunc, row chunk.Row) (d types.Datum, err error) {
	var (
		res    interface{}
		isNull bool
	)
	switch f.getRetTp().EvalType() {
	case types.ETInt:
		var intRes int64
		intRes, isNull, err = f.evalInt(row)
		// Unsigned columns are surfaced as uint64 so the Datum carries the
		// correct Go type for the column's flag.
		if mysql.HasUnsignedFlag(f.getRetTp().Flag) {
			res = uint64(intRes)
		} else {
			res = intRes
		}
	case types.ETReal:
		res, isNull, err = f.evalReal(row)
	case types.ETDecimal:
		res, isNull, err = f.evalDecimal(row)
	case types.ETDatetime, types.ETTimestamp:
		res, isNull, err = f.evalTime(row)
	case types.ETDuration:
		res, isNull, err = f.evalDuration(row)
	case types.ETJson:
		res, isNull, err = f.evalJSON(row)
	case types.ETString:
		res, isNull, err = f.evalString(row)
	}

	if isNull || err != nil {
		d.SetNull()
		return d, err
	}
	d.SetValue(res, f.getRetTp())
	return
}
// tblToDtbl is a util function for test. It converts a slice of test-case
// structs into a slice of maps keyed by field name, with each field value
// wrapped into a []types.Datum via makeDatums.
func tblToDtbl(i interface{}) []map[string][]types.Datum {
	// Hoist the reflection wrapper: the original recomputed
	// reflect.ValueOf(i) on every loop iteration.
	v := reflect.ValueOf(i)
	l := v.Len()
	tbl := make([]map[string][]types.Datum, l)
	for j := 0; j < l; j++ {
		// Round-trip through Interface() so interface-typed elements are
		// unwrapped to their concrete struct values.
		row := v.Index(j).Interface()
		val := reflect.ValueOf(row)
		t := reflect.TypeOf(row)
		item := make(map[string][]types.Datum, val.NumField())
		for k := 0; k < val.NumField(); k++ {
			item[t.Field(k).Name] = makeDatums(val.Field(k).Interface())
		}
		tbl[j] = item
	}
	return tbl
}
// makeDatums converts an arbitrary test value into a []types.Datum. A slice
// input yields one datum per element; any other value (including nil) becomes
// a single-datum slice.
func makeDatums(i interface{}) []types.Datum {
	if i == nil {
		return types.MakeDatums(i)
	}
	val := reflect.ValueOf(i)
	if val.Kind() != reflect.Slice {
		return types.MakeDatums(i)
	}
	datums := make([]types.Datum, val.Len())
	for idx := range datums {
		datums[idx] = types.NewDatum(val.Index(idx).Interface())
	}
	return datums
}
// TestIsNullFunc checks the ISNULL builtin: it returns 0 for a non-NULL
// argument and 1 for a NULL argument.
func (s *testEvaluatorSuite) TestIsNullFunc(c *C) {
	fc := funcs[ast.IsNull]
	// ISNULL(1) -> 0
	f, err := fc.getFunction(s.ctx, s.datumsToConstants(types.MakeDatums(1)))
	c.Assert(err, IsNil)
	v, err := evalBuiltinFunc(f, chunk.Row{})
	c.Assert(err, IsNil)
	c.Assert(v.GetInt64(), Equals, int64(0))
	// ISNULL(NULL) -> 1
	f, err = fc.getFunction(s.ctx, s.datumsToConstants(types.MakeDatums(nil)))
	c.Assert(err, IsNil)
	v, err = evalBuiltinFunc(f, chunk.Row{})
	c.Assert(err, IsNil)
	c.Assert(v.GetInt64(), Equals, int64(1))
}
// TestLock exercises the GET_LOCK and RELEASE_LOCK builtins; both are
// expected to report success (1) here.
// NOTE(review): this assumes the implementations always succeed for these
// arguments — confirm against the current builtin implementations.
func (s *testEvaluatorSuite) TestLock(c *C) {
	lock := funcs[ast.GetLock]
	// GET_LOCK(NULL, 1) -> 1
	f, err := lock.getFunction(s.ctx, s.datumsToConstants(types.MakeDatums(nil, 1)))
	c.Assert(err, IsNil)
	v, err := evalBuiltinFunc(f, chunk.Row{})
	c.Assert(err, IsNil)
	c.Assert(v.GetInt64(), Equals, int64(1))
	releaseLock := funcs[ast.ReleaseLock]
	// RELEASE_LOCK(1) -> 1
	f, err = releaseLock.getFunction(s.ctx, s.datumsToConstants(types.MakeDatums(1)))
	c.Assert(err, IsNil)
	v, err = evalBuiltinFunc(f, chunk.Row{})
	c.Assert(err, IsNil)
	c.Assert(v.GetInt64(), Equals, int64(1))
}
// TestDisplayName checks that GetDisplayName maps known operator function
// names to their SQL symbols and falls back to the raw name otherwise.
func (s *testEvaluatorSuite) TestDisplayName(c *C) {
	testCases := []struct {
		funcName string
		display  string
	}{
		{ast.EQ, "="},
		{ast.NullEQ, "<=>"},
		{ast.IsTruthWithoutNull, "IS TRUE"},
		{"abs", "abs"},
		{"other_unknown_func", "other_unknown_func"},
	}
	for _, tc := range testCases {
		c.Assert(GetDisplayName(tc.funcName), Equals, tc.display)
	}
}
// newFunctionForTest creates a new ScalarFunction using funcName and arguments,
// it is different from expression.NewFunction which needs an additional retType argument.
// The return type is taken from the resolved builtin itself.
func newFunctionForTest(ctx sessionctx.Context, funcName string, args ...Expression) (Expression, error) {
	fc, ok := funcs[funcName]
	if !ok {
		return nil, errFunctionNotExists.GenWithStackByArgs("FUNCTION", funcName)
	}
	// Copy args so the builtin cannot observe or mutate the caller's slice.
	funcArgs := make([]Expression, len(args))
	copy(funcArgs, args)
	f, err := fc.getFunction(ctx, funcArgs)
	if err != nil {
		return nil, err
	}
	return &ScalarFunction{
		FuncName: model.NewCIStr(funcName),
		RetType: f.getRetTp(),
		Function: f,
	}, nil
}
var (
	// int8Con is a constant typed as MySQL BIGINT (int8) with binary charset/collation.
	int8Con = &Constant{RetType: &types.FieldType{Tp: mysql.TypeLonglong, Charset: charset.CharsetBin, Collate: charset.CollationBin}}
	// varcharCon is a constant typed as MySQL VARCHAR with utf8 charset/collation.
	varcharCon = &Constant{RetType: &types.FieldType{Tp: mysql.TypeVarchar, Charset: charset.CharsetUTF8, Collate: charset.CollationUTF8}}
)
| coocood/tidb | expression/builtin_test.go | GO | apache-2.0 | 5,387 |
// Loads an HTML fragment from the given URL via an asynchronous GET request
// and injects the response into the element with id "principal".
// Keeps the ActiveX fallback so the page still works on IE5/IE6.
function loadXMLDoc(url)
{
  var request;
  if (window.XMLHttpRequest)
  {
    // IE7+, Firefox, Chrome, Opera, Safari
    request = new XMLHttpRequest();
  }
  else
  {
    // IE6, IE5
    request = new ActiveXObject("Microsoft.XMLHTTP");
  }
  request.onreadystatechange = function ()
  {
    // 4 === DONE; only inject on a successful response.
    if (request.readyState === 4 && request.status === 200)
    {
      document.getElementById("principal").innerHTML = request.responseText;
    }
  };
  request.open("GET", url, true);
  request.send();
}
| CarlosIribarren/Ejemplos-Examples | JavaScript/Nativo/Algunos ejemplos/02 Insertar codigo HTML/funcionJS.js | JavaScript | apache-2.0 | 535 |
package Init.Enum;
import Generics.Generator;
import java.util.EnumMap;
import java.util.Iterator;
import static Init.Enum.Input.*;
import static Init.Print.print;
/**
 * Created by Defias on 2020/07.
 * Description: a state machine implemented with enums (vending machine demo).
 *
 */
public class TestEnumStates {
    public static void main(String[] args) {
        // Drive the vending machine with randomly generated inputs until it halts.
        Generator<Input> gen = new RandomInputGenerator();
        VendingMachine.run(gen);
    }
}
/**
 * A vending machine modeled as an enum-based state machine. Each State
 * constant overrides next() to react to categorized input; states tagged
 * TRANSIENT advance on their own without consuming further input.
 */
class VendingMachine {
    // Current machine state; starts idle.
    private static State state = State.RESTING;
    // Money inserted so far, in the units returned by Input.amount().
    private static int amount = 0;
    // Item chosen by the user; set while dispensing/giving change.
    private static Input selection = null;
    enum StateDuration { TRANSIENT } // Tagging enum
    enum State {
        RESTING {
            void next(Input input) {
                switch(Category.categorize(input)) {
                    case MONEY:
                        amount += input.amount();
                        state = ADDING_MONEY;
                        break;
                    case SHUT_DOWN:
                        state = TERMINAL;
                        // falls through to the empty default
                    default:
                }
            }
        },
        ADDING_MONEY {
            void next(Input input) {
                switch(Category.categorize(input)) {
                    case MONEY:
                        amount += input.amount();
                        break;
                    case ITEM_SELECTION:
                        selection = input;
                        if(amount < selection.amount())
                            print("Insufficient money for " + selection);
                        else state = DISPENSING;
                        break;
                    case QUIT_TRANSACTION:
                        state = GIVING_CHANGE;
                        break;
                    case SHUT_DOWN:
                        state = TERMINAL;
                        // falls through to the empty default
                    default:
                }
            }
        },
        DISPENSING(StateDuration.TRANSIENT) {
            void next() {
                print("here is your " + selection);
                amount -= selection.amount();
                state = GIVING_CHANGE;
            }
        },
        GIVING_CHANGE(StateDuration.TRANSIENT) {
            void next() {
                if(amount > 0) {
                    print("Your change: " + amount);
                    amount = 0;
                }
                state = RESTING;
            }
        },
        TERMINAL { void output() { print("Halted"); }};
        // True for states that advance without consuming input.
        private boolean isTransient = false;
        State() {}
        State(StateDuration trans) {
            isTransient = true;
        }
        // Input-consuming transition; only valid for non-transient states.
        void next(Input input) {
            throw new RuntimeException("Only call " +
                "next(Input input) for non-transient states");
        }
        // Self-advancing transition; only valid for TRANSIENT states.
        void next() {
            throw new RuntimeException("Only call next() for " +
                "StateDuration.TRANSIENT states");
        }
        // Default status output: the amount of money currently held.
        void output() { print(amount); }
    }
    /** Feeds inputs from gen into the machine until it reaches TERMINAL. */
    static void run(Generator<Input> gen) {
        while(state != State.TERMINAL) {
            state.next(gen.next());
            // Let transient states (dispense, give change) play out fully.
            while(state.isTransient)
                state.next();
            state.output();
        }
    }
}
// For a basic sanity check:
/** Supplies uniformly random Input values to exercise the state machine. */
class RandomInputGenerator implements Generator<Input> {
    public Input next() {
        return Input.randomSelection();
    }
}
/**
 * Groups each {@link Input} into a broad category so states can switch on the
 * kind of input rather than on every individual value.
 */
enum Category {
    MONEY(NICKEL, DIME, QUARTER, DOLLAR),
    ITEM_SELECTION(TOOTHPASTE, CHIPS, SODA, SOAP),
    QUIT_TRANSACTION(ABORT_TRANSACTION),
    SHUT_DOWN(STOP);
    private Input[] values;
    Category(Input... types) {
        values = types;
    }
    // Reverse lookup table from each Input to its Category, built once.
    private static EnumMap<Input,Category> categories =
        new EnumMap<Input,Category>(Input.class);
    static {
        for(Category c : Category.class.getEnumConstants())
            for(Input type : c.values)
                categories.put(type, c);
    }
    public static Category categorize(Input input) {
        return categories.get(input);
    }
}
| gdefias/JavaDemo | InitJava/base/src/main/java/Init/Enum/TestEnumStates.java | Java | apache-2.0 | 3,956 |
#include <QtWidgets>
#include <math.h>
#include "starrating.h"
const int PaintingScaleFactor = 20;
// Constructs a rating of starCount filled stars out of maxStarCount, and
// precomputes the unit-square polygons used by paint(). Both polygons are
// defined in [0,1] x [0,1] and scaled up by PaintingScaleFactor when painted.
StarRating::StarRating(int starCount, int maxStarCount)
{
    myStarCount = starCount;
    myMaxStarCount = maxStarCount;
    // Star points stepped 0.8 * pi radians (144 degrees) apart on a circle of
    // radius 0.5 centered at (0.5, 0.5), tracing a five-pointed star.
    // 3.14 is a coarse approximation of pi, so the star is slightly
    // asymmetric. NOTE(review): M_PI would give exact symmetry — confirm
    // whether the current rendering is relied upon before changing.
    starPolygon << QPointF(1.0, 0.5);
    for (int i = 1; i < 5; ++i)
    {
        starPolygon << QPointF(0.5 + 0.5 * cos(0.8 * i * 3.14), 0.5 + 0.5 * sin(0.8 * i * 3.14));
    }
    // Small diamond drawn for unfilled, editable rating slots.
    diamondPolygon << QPointF(0.4, 0.5) << QPointF(0.5, 0.4)
                   << QPointF(0.6, 0.5) << QPointF(0.5, 0.6)
                   << QPointF(0.4, 0.5);
}
// Preferred size of the painted rating: one PaintingScaleFactor-pixel square
// per possible star, laid out in a single row.
QSize StarRating::sizeHint() const
{
    return PaintingScaleFactor * QSize(myMaxStarCount, 1);
}
// Paints the rating into rect: one filled star per rated unit and, when the
// rating is editable, a diamond placeholder for each remaining unit.
void StarRating::paint(QPainter *painter, const QRect &rect, const QPalette &palette, EditMode mode) const
{
    painter->save();
    painter->setRenderHint(QPainter::Antialiasing, true);
    painter->setPen(Qt::NoPen);
    if (mode == Editable)
    {
        // Editable cells use the highlight color to signal interactivity.
        painter->setBrush(palette.highlight());
    }
    else
    {
        // NOTE(review): QPalette::foreground() is deprecated in recent Qt
        // versions in favor of windowText() — confirm the targeted Qt version.
        painter->setBrush(palette.foreground());
    }
    // Vertically center the row of stars inside the cell rectangle.
    int yOffset = (rect.height() - PaintingScaleFactor) / 2;
    painter->translate(rect.x(), rect.y() + yOffset);
    // Polygons are defined in a unit square; scale up to pixel size.
    painter->scale(PaintingScaleFactor, PaintingScaleFactor);
    for (int i = 0; i < myMaxStarCount; ++i)
    {
        if (i < myStarCount)
        {
            painter->drawPolygon(starPolygon, Qt::WindingFill);
        }
        else if (mode == Editable)
        {
            painter->drawPolygon(diamondPolygon, Qt::WindingFill);
        }
        // Advance one star width in the scaled coordinate system.
        painter->translate(1.0, 0.0);
    }
    painter->restore();
}
| luchenqun/StarDelegate | starrating.cpp | C++ | apache-2.0 | 1,586 |
package org.marsik.bugautomation.rest;
import javax.ws.rs.ApplicationPath;
import javax.ws.rs.core.Application;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
@ApplicationPath("/")
public class RestApplication extends Application {
    /**
     * Registers the JAX-RS resource classes exposed by this application:
     * the metrics and info endpoints.
     */
    @Override
    public Set<Class<?>> getClasses() {
        final Set<Class<?>> resources = new HashSet<>();
        Collections.addAll(resources, MetricsEndpoint.class, InfoEndpoint.class);
        return resources;
    }
}
| MarSik/bugautomation | src/main/java/org/marsik/bugautomation/rest/RestApplication.java | Java | apache-2.0 | 495 |
/**
* <copyright>
* </copyright>
*
* $Id$
*/
package net.opengis.gml.provider;
import java.util.Collection;
import java.util.List;
import net.opengis.gml.GmlFactory;
import net.opengis.gml.GmlPackage;
import net.opengis.gml.TopoVolumeType;
import org.eclipse.emf.common.notify.AdapterFactory;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.ecore.EStructuralFeature;
import org.eclipse.emf.ecore.util.FeatureMap;
import org.eclipse.emf.ecore.util.FeatureMapUtil;
import org.eclipse.emf.edit.provider.IEditingDomainItemProvider;
import org.eclipse.emf.edit.provider.IItemLabelProvider;
import org.eclipse.emf.edit.provider.IItemPropertyDescriptor;
import org.eclipse.emf.edit.provider.IItemPropertySource;
import org.eclipse.emf.edit.provider.IStructuredItemContentProvider;
import org.eclipse.emf.edit.provider.ITreeItemContentProvider;
import org.eclipse.emf.edit.provider.ViewerNotification;
/**
 * This is the item provider adapter for a {@link net.opengis.gml.TopoVolumeType} object.
 * <!-- begin-user-doc -->
 * Presents TopoVolumeType model elements in EMF.Edit-based viewers: supplies
 * property descriptors, child features, labels, icons, change notifications
 * and child-creation descriptors. This class is EMF-generated; hand-written
 * text belongs only between the user-doc markers.
 * <!-- end-user-doc -->
 * @generated
 */
public class TopoVolumeTypeItemProvider
	extends AbstractTopologyTypeItemProvider
	implements
		IEditingDomainItemProvider,
		IStructuredItemContentProvider,
		ITreeItemContentProvider,
		IItemLabelProvider,
		IItemPropertySource {
	/**
	 * This constructs an instance from a factory and a notifier.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public TopoVolumeTypeItemProvider(AdapterFactory adapterFactory) {
		super(adapterFactory);
	}
	/**
	 * This returns the property descriptors for the adapted class.
	 * <!-- begin-user-doc -->
	 * Descriptors are cached after the first call; this type adds none beyond
	 * those of the superclass.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public List<IItemPropertyDescriptor> getPropertyDescriptors(Object object) {
		if (itemPropertyDescriptors == null) {
			super.getPropertyDescriptors(object);
		}
		return itemPropertyDescriptors;
	}
	/**
	 * This specifies how to implement {@link #getChildren} and is used to deduce an appropriate feature for an
	 * {@link org.eclipse.emf.edit.command.AddCommand}, {@link org.eclipse.emf.edit.command.RemoveCommand} or
	 * {@link org.eclipse.emf.edit.command.MoveCommand} in {@link #createCommand}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Collection<? extends EStructuralFeature> getChildrenFeatures(Object object) {
		if (childrenFeatures == null) {
			super.getChildrenFeatures(object);
			childrenFeatures.add(GmlPackage.eINSTANCE.getTopoVolumeType_DirectedTopoSolid());
		}
		return childrenFeatures;
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	protected EStructuralFeature getChildFeature(Object object, Object child) {
		// Check the type of the specified child object and return the proper feature to use for
		// adding (see {@link AddCommand}) it as a child.
		return super.getChildFeature(object, child);
	}
	/**
	 * This returns TopoVolumeType.gif.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Object getImage(Object object) {
		return overlayImage(object, getResourceLocator().getImage("full/obj16/TopoVolumeType"));
	}
	/**
	 * This returns the label text for the adapted class.
	 * <!-- begin-user-doc -->
	 * The label is the type name, suffixed with the element's id when present.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public String getText(Object object) {
		String label = ((TopoVolumeType)object).getId();
		return label == null || label.length() == 0 ?
			getString("_UI_TopoVolumeType_type") :
			getString("_UI_TopoVolumeType_type") + " " + label;
	}
	/**
	 * This handles model notifications by calling {@link #updateChildren} to update any cached
	 * children and by creating a viewer notification, which it passes to {@link #fireNotifyChanged}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void notifyChanged(Notification notification) {
		updateChildren(notification);
		switch (notification.getFeatureID(TopoVolumeType.class)) {
			case GmlPackage.TOPO_VOLUME_TYPE__DIRECTED_TOPO_SOLID:
				fireNotifyChanged(new ViewerNotification(notification, notification.getNotifier(), true, false));
				return;
		}
		super.notifyChanged(notification);
	}
	/**
	 * This adds {@link org.eclipse.emf.edit.command.CommandParameter}s describing the children
	 * that can be created under this object.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	protected void collectNewChildDescriptors(Collection<Object> newChildDescriptors, Object object) {
		super.collectNewChildDescriptors(newChildDescriptors, object);
		newChildDescriptors.add
			(createChildParameter
				(GmlPackage.eINSTANCE.getTopoVolumeType_DirectedTopoSolid(),
				 GmlFactory.eINSTANCE.createDirectedTopoSolidPropertyType()));
	}
	/**
	 * This returns the label text for {@link org.eclipse.emf.edit.command.CreateChildCommand}.
	 * <!-- begin-user-doc -->
	 * Features with ambiguous child labels are qualified with the feature name.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public String getCreateChildText(Object owner, Object feature, Object child, Collection<?> selection) {
		Object childFeature = feature;
		Object childObject = child;
		if (childFeature instanceof EStructuralFeature && FeatureMapUtil.isFeatureMap((EStructuralFeature)childFeature)) {
			FeatureMap.Entry entry = (FeatureMap.Entry)childObject;
			childFeature = entry.getEStructuralFeature();
			childObject = entry.getValue();
		}
		boolean qualify =
			childFeature == GmlPackage.eINSTANCE.getAbstractGMLType_Name() ||
			childFeature == GmlPackage.eINSTANCE.getDocumentRoot_CoordinateOperationName() ||
			childFeature == GmlPackage.eINSTANCE.getDocumentRoot_CsName() ||
			childFeature == GmlPackage.eINSTANCE.getDocumentRoot_DatumName() ||
			childFeature == GmlPackage.eINSTANCE.getDocumentRoot_EllipsoidName() ||
			childFeature == GmlPackage.eINSTANCE.getDocumentRoot_GroupName() ||
			childFeature == GmlPackage.eINSTANCE.getDocumentRoot_MeridianName() ||
			childFeature == GmlPackage.eINSTANCE.getDocumentRoot_MethodName() ||
			childFeature == GmlPackage.eINSTANCE.getDocumentRoot_ParameterName() ||
			childFeature == GmlPackage.eINSTANCE.getDocumentRoot_SrsName();
		if (qualify) {
			return getString
				("_UI_CreateChild_text2",
				 new Object[] { getTypeText(childObject), getFeatureText(childFeature), getTypeText(owner) });
		}
		return super.getCreateChildText(owner, feature, child, selection);
	}
}
| markus1978/citygml4emf | de.hub.citygml.emf.ecore.edit/src/net/opengis/gml/provider/TopoVolumeTypeItemProvider.java | Java | apache-2.0 | 6,438 |
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.test.espresso.action;
import static androidx.test.platform.app.InstrumentationRegistry.getInstrumentation;
import static junit.framework.Assert.assertEquals;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.spy;
import android.view.View;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.SmallTest;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
/** Unit tests for {@link GeneralLocation}. */
@SmallTest
@RunWith(AndroidJUnit4.class)
public class GeneralLocationTest {
  // On-screen origin the mocked view reports.
  private static final int VIEW_POSITION_X = 100;
  private static final int VIEW_POSITION_Y = 50;
  // Laid-out size of the mocked view.
  private static final int VIEW_WIDTH = 150;
  private static final int VIEW_HEIGHT = 300;
  // Indices into the coordinate array returned by calculateCoordinates().
  private static final int AXIS_X = 0;
  private static final int AXIS_Y = 1;
  private View mockView;
  @Before
  public void setUp() throws Exception {
    mockView = spy(new View(getInstrumentation().getContext()));
    // Stub getLocationOnScreen so the view reports a fixed on-screen origin.
    doAnswer(
            new Answer<Void>() {
              @Override
              public Void answer(InvocationOnMock invocation) throws Throwable {
                int[] array = (int[]) invocation.getArguments()[0];
                array[AXIS_X] = VIEW_POSITION_X;
                array[AXIS_Y] = VIEW_POSITION_Y;
                return null;
              }
            })
        .when(mockView)
        .getLocationOnScreen(any(int[].class));
    // Give the view concrete dimensions via layout().
    mockView.layout(
        VIEW_POSITION_X,
        VIEW_POSITION_Y,
        VIEW_POSITION_X + VIEW_WIDTH,
        VIEW_POSITION_Y + VIEW_HEIGHT);
  }
  @Test
  public void leftLocationsX() {
    assertPositionEquals(VIEW_POSITION_X, GeneralLocation.TOP_LEFT, AXIS_X);
    assertPositionEquals(VIEW_POSITION_X, GeneralLocation.CENTER_LEFT, AXIS_X);
    assertPositionEquals(VIEW_POSITION_X, GeneralLocation.BOTTOM_LEFT, AXIS_X);
  }
  @Test
  public void rightLocationsX() {
    // The right edge is the last addressable pixel column: x + width - 1.
    assertPositionEquals(VIEW_POSITION_X + VIEW_WIDTH - 1, GeneralLocation.TOP_RIGHT, AXIS_X);
    assertPositionEquals(VIEW_POSITION_X + VIEW_WIDTH - 1, GeneralLocation.CENTER_RIGHT, AXIS_X);
    assertPositionEquals(VIEW_POSITION_X + VIEW_WIDTH - 1, GeneralLocation.BOTTOM_RIGHT, AXIS_X);
  }
  @Test
  public void topLocationsY() {
    assertPositionEquals(VIEW_POSITION_Y, GeneralLocation.TOP_LEFT, AXIS_Y);
    assertPositionEquals(VIEW_POSITION_Y, GeneralLocation.TOP_CENTER, AXIS_Y);
    assertPositionEquals(VIEW_POSITION_Y, GeneralLocation.TOP_RIGHT, AXIS_Y);
  }
  @Test
  public void bottomLocationsY() {
    // The bottom edge is the last addressable pixel row: y + height - 1.
    assertPositionEquals(VIEW_POSITION_Y + VIEW_HEIGHT - 1, GeneralLocation.BOTTOM_LEFT, AXIS_Y);
    assertPositionEquals(VIEW_POSITION_Y + VIEW_HEIGHT - 1, GeneralLocation.BOTTOM_CENTER, AXIS_Y);
    assertPositionEquals(VIEW_POSITION_Y + VIEW_HEIGHT - 1, GeneralLocation.BOTTOM_RIGHT, AXIS_Y);
  }
  @Test
  public void centerLocationsX() {
    // Centers are measured between the first and last pixel: (size - 1) / 2.
    assertPositionEquals(VIEW_POSITION_X + (VIEW_WIDTH - 1) / 2.0f, GeneralLocation.CENTER, AXIS_X);
    assertPositionEquals(
        VIEW_POSITION_X + (VIEW_WIDTH - 1) / 2.0f, GeneralLocation.TOP_CENTER, AXIS_X);
    assertPositionEquals(
        VIEW_POSITION_X + (VIEW_WIDTH - 1) / 2.0f, GeneralLocation.BOTTOM_CENTER, AXIS_X);
  }
  @Test
  public void centerLocationsY() {
    assertPositionEquals(
        VIEW_POSITION_Y + (VIEW_HEIGHT - 1) / 2.0f, GeneralLocation.CENTER, AXIS_Y);
    assertPositionEquals(
        VIEW_POSITION_Y + (VIEW_HEIGHT - 1) / 2.0f, GeneralLocation.CENTER_LEFT, AXIS_Y);
    assertPositionEquals(
        VIEW_POSITION_Y + (VIEW_HEIGHT - 1) / 2.0f, GeneralLocation.CENTER_RIGHT, AXIS_Y);
  }
  // Compares the requested axis of the location against expected (0.1px slack).
  private void assertPositionEquals(float expected, GeneralLocation location, int axis) {
    assertEquals(expected, location.calculateCoordinates(mockView)[axis], 0.1f);
  }
}
| android/android-test | espresso/core/javatests/androidx/test/espresso/action/GeneralLocationTest.java | Java | apache-2.0 | 4,547 |
#
# Copyright 2015-2019, Institute for Systems Biology
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from builtins import map
from builtins import str
from builtins import object
from copy import deepcopy
import logging
from bq_data_access.v2.seqpeek.seqpeek_interpro import InterProDataProvider
logger = logging.getLogger('main_logger')
# Mutation-record key holding the originating sample's ID.
SAMPLE_ID_FIELD_NAME = 'sample_id'
# Track-dict key identifying which cohort/tumor the track represents.
TRACK_ID_FIELD = "tumor"
# Mutation-record key holding the UniProt amino-acid position.
COORDINATE_FIELD_NAME = 'uniprot_aapos'
# Mutation-record key used as the protein identifier for InterPro lookups.
# NOTE(review): the value 'ensg_id' looks like an Ensembl gene ID even though
# downstream code treats it as a UniProt ID — confirm.
PROTEIN_ID_FIELD = 'ensg_id'
# Only protein-domain matches from this database are rendered.
PROTEIN_DOMAIN_DB = 'PFAM'
# When True, static debug gene/tumor lists are served instead of remote data.
SEQPEEK_VIEW_DEBUG_MODE = False
def get_number_of_unique_samples(track):
    """Return how many distinct sample IDs appear in the track's mutations."""
    return len({mutation[SAMPLE_ID_FIELD_NAME] for mutation in track['mutations']})
def get_number_of_mutated_positions(track):
    """Return how many distinct protein positions are mutated in the track."""
    return len({mutation[COORDINATE_FIELD_NAME] for mutation in track['mutations']})
# TODO remove if not needed
def clean_track_mutations(mutations_array):
    """Return deep copies of the mutations with the coordinate coerced to int."""
    cleaned_mutations = []
    for mutation in mutations_array:
        item = deepcopy(mutation)
        item[COORDINATE_FIELD_NAME] = int(mutation[COORDINATE_FIELD_NAME])
        cleaned_mutations.append(item)
    return cleaned_mutations
def sort_track_mutations(mutations_array):
    """Return the mutations ordered by their protein coordinate."""
    def coordinate_of(mutation):
        return mutation[COORDINATE_FIELD_NAME]
    return sorted(mutations_array, key=coordinate_of)
def get_track_statistics_by_track_type(track, cohort_info_map):
    """Build the per-track statistics dict (sample counts plus cohort size).

    The 'COMBINED' summary track has no single backing cohort, so its
    cohort_size is None.
    """
    track_id = track[TRACK_ID_FIELD]
    statistics = {
        'samples': {
            'numberOf': get_number_of_unique_samples(track),
            'mutated_positions': get_number_of_mutated_positions(track)
        }
    }
    if track['type'] == 'tumor':
        statistics['cohort_size'] = cohort_info_map[track_id]['size']
    else:
        statistics['cohort_size'] = None
    return statistics
def filter_protein_domains(match_array):
    """Keep only matches that come from the PFAM protein-domain database."""
    return list(filter(lambda match: match['dbname'] == PROTEIN_DOMAIN_DB, match_array))
def get_table_row_id(tumor_type):
    """Build the DOM row ID used for this tumor type in the SeqPeek table."""
    return f"seqpeek_row_{tumor_type}"
def build_seqpeek_regions(protein_data):
    """SeqPeek renders a single 'exon' region spanning the whole protein."""
    whole_protein = {'type': 'exon', 'start': 0, 'end': protein_data['length']}
    return [whole_protein]
def build_summary_track(tracks):
    """Merge the mutations of every track into a single 'COMBINED' track."""
    combined = [mutation for track in tracks for mutation in track["mutations"]]
    return {
        'mutations': combined,
        'label': 'COMBINED',
        'tumor': 'none-combined',
        'type': 'summary'
    }
def get_track_label_and_cohort_information(track_id_value, cohort_info_map):
    """Look up the display label and cohort size for a track ID."""
    cohort_info = cohort_info_map[track_id_value]
    return cohort_info['name'], cohort_info['size']
def get_track_label(track, cohort_info_array):
    """Resolve a track's display name from the cohort info array."""
    # The IDs in cohort_info_array are integers, whereas the track IDs are strings.
    labels_by_id = {}
    for item in cohort_info_array:
        labels_by_id[str(item['id'])] = item['name']
    return labels_by_id[track[TRACK_ID_FIELD]]
def get_protein_domains(uniprot_id):
    """Fetch InterPro domain data for the given protein identifier."""
    provider = InterProDataProvider()
    return provider.get_data(uniprot_id)
class MAFData(object):
    """Pairs cohort metadata with its MAF mutation records."""

    def __init__(self, cohort_info, data):
        # Cohort metadata ('cohort_set' in the wire format).
        self.cohort_info = cohort_info
        # Mutation records ('items' in the wire format).
        self.data = data

    @classmethod
    def from_dict(cls, param):
        """Build an instance from a {'cohort_set': ..., 'items': ...} mapping."""
        return cls(cohort_info=param['cohort_set'], data=param['items'])
def build_track_data(track_id_list, all_tumor_mutations):
    """Create one track per cohort ID, holding the mutations whose 'cohort'
    list contains that ID (compared as an integer)."""
    tracks = []
    for track_id in track_id_list:
        numeric_id = int(track_id)
        track_mutations = [
            mutation for mutation in all_tumor_mutations
            if numeric_id in set(mutation['cohort'])
        ]
        tracks.append({
            TRACK_ID_FIELD: track_id,
            'mutations': track_mutations
        })
    return tracks
def find_uniprot_id(mutations):
    """Return the first protein ID found in the mutation list, or None."""
    for mutation in mutations:
        if PROTEIN_ID_FIELD in mutation:
            return mutation[PROTEIN_ID_FIELD]
    return None
def get_genes_tumors_lists_debug():
    """Static gene/tumor lists served when SeqPeek debug mode is enabled."""
    return {
        'symbol_list': ['EGFR', 'TP53', 'PTEN'],
        'disease_codes': ['ACC', 'BRCA', 'GBM'],
    }
def get_genes_tumors_lists_remote():
    """Empty gene/track scaffold used outside of debug mode."""
    return {'symbol_list': [], 'track_id_list': []}
def get_genes_tumors_lists():
    """Dispatch to the debug or remote gene/tumor lists."""
    if SEQPEEK_VIEW_DEBUG_MODE:
        return get_genes_tumors_lists_debug()
    return get_genes_tumors_lists_remote()
def get_track_id_list(param):
    """Normalize a sequence of cohort IDs to their string representations."""
    return [str(track_id) for track_id in param]
def format_removed_row_statistics_to_list(stats_dict):
    """Convert {reason: count} into a list of {'name': reason, 'num': count}."""
    return [{'name': reason, 'num': count} for reason, count in stats_dict.items()]
class SeqPeekViewDataBuilder(object):
    def build_view_data(self, hugo_symbol, filtered_maf_vector, seqpeek_cohort_info, cohort_id_list, removed_row_statistics, tables_used):
        """Assemble the full template context for rendering the SeqPeek view.

        hugo_symbol: gene symbol the MAF vector was filtered to.
        filtered_maf_vector: mutation records for that gene.
        seqpeek_cohort_info: list of cohort dicts with 'id', 'name' and 'size'.
        cohort_id_list: cohort IDs selected by the user.
        removed_row_statistics: {reason: count} of rows dropped upstream.
        tables_used: BigQuery table names consulted (deduplicated here).
        """
        context = get_genes_tumors_lists()
        cohort_info_map = {str(item['id']): item for item in seqpeek_cohort_info}
        track_id_list = get_track_id_list(cohort_id_list)
        # Since the gene (hugo_symbol) parameter is part of the GNAB feature ID,
        # it will be sanity-checked in the SeqPeekMAFDataAccess instance.
        uniprot_id = find_uniprot_id(filtered_maf_vector)
        logging.info("UniProt ID: " + str(uniprot_id))
        protein_data = get_protein_domains(uniprot_id)
        track_data = build_track_data(track_id_list, filtered_maf_vector)
        plot_data = {
            'gene_label': hugo_symbol,
            'tracks': track_data,
            'protein': protein_data
        }
        # Pre-processing
        # - Sort mutations by chromosomal coordinate
        for track in plot_data['tracks']:
            track['mutations'] = sort_track_mutations(track['mutations'])
        # Annotations
        # - Add label, possibly human readable
        # - Add type that indicates whether the track is driven by data from search or
        # if the track is aggregate
        for track in plot_data['tracks']:
            track['type'] = 'tumor'
            label, cohort_size = get_track_label_and_cohort_information(track[TRACK_ID_FIELD], cohort_info_map)
            track['label'] = label
        # Display the "combined" track only if more than one cohort is visualized
        if len(cohort_id_list) >= 2:
            plot_data['tracks'].append(build_summary_track(plot_data['tracks']))
        for track in plot_data['tracks']:
            # Calculate statistics
            track['statistics'] = get_track_statistics_by_track_type(track, cohort_info_map)
            # Unique ID for each row
            track['render_info'] = {
                'row_id': get_table_row_id(track[TRACK_ID_FIELD])
            }
        # The whole protein is rendered as one region; only PFAM domains are kept.
        plot_data['regions'] = build_seqpeek_regions(plot_data['protein'])
        plot_data['protein']['matches'] = filter_protein_domains(plot_data['protein']['matches'])
        tumor_list = ','.join(track_id_list)
        context.update({
            'plot_data': plot_data,
            'hugo_symbol': hugo_symbol,
            'tumor_list': tumor_list,
            'cohort_id_list': track_id_list,
            'removed_row_statistics': format_removed_row_statistics_to_list(removed_row_statistics),
            'bq_tables': list(set(tables_used))
        })
        return context
| isb-cgc/ISB-CGC-Webapp | bq_data_access/v2/seqpeek/seqpeek_view.py | Python | apache-2.0 | 7,709 |
/*
* Copyright 2014 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.channel.unix;
import io.netty.util.internal.ClassInitializerUtil;
import io.netty.util.internal.UnstableApi;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.PortUnreachableException;
import java.nio.channels.ClosedChannelException;
import java.util.concurrent.atomic.AtomicBoolean;
/**
 * Tells if <a href="https://netty.io/wiki/native-transports.html">{@code netty-transport-native-unix}</a> is
 * supported.
 */
public final class Unix {
    // Guards one-time native registration; flips to true on the first
    // registerInternal(...) call and never resets.
    private static final AtomicBoolean registered = new AtomicBoolean();
    static {
        // Preload all classes that will be used in the OnLoad(...) function of JNI to eliminate the possiblity of a
        // class-loader deadlock. This is a workaround for https://github.com/netty/netty/issues/11209.
        // This needs to match all the classes that are loaded via NETTY_JNI_UTIL_LOAD_CLASS or looked up via
        // NETTY_JNI_UTIL_FIND_CLASS.
        ClassInitializerUtil.tryLoadClasses(Unix.class,
                // netty_unix_errors
                OutOfMemoryError.class, RuntimeException.class, ClosedChannelException.class,
                IOException.class, PortUnreachableException.class,
                // netty_unix_socket
                DatagramSocketAddress.class, InetSocketAddress.class
        );
    }
    /**
     * Internal method... Should never be called from the user.
     *
     * @param registerTask task performing the actual native registration;
     *        runs at most once per JVM, after which sockets are initialized.
     */
    @UnstableApi
    public static void registerInternal(Runnable registerTask) {
        if (registered.compareAndSet(false, true)) {
            registerTask.run();
            Socket.initialize();
        }
    }
    /**
     * Returns {@code true} if and only if the <a href="https://netty.io/wiki/native-transports.html">{@code
     * netty_transport_native_unix}</a> is available.
     */
    @Deprecated
    public static boolean isAvailable() {
        return false;
    }
    /**
     * Ensure that <a href="https://netty.io/wiki/native-transports.html">{@code netty_transport_native_unix}</a> is
     * available.
     *
     * @throws UnsatisfiedLinkError if unavailable
     */
    @Deprecated
    public static void ensureAvailability() {
        throw new UnsupportedOperationException();
    }
    /**
     * Returns the cause of unavailability of <a href="https://netty.io/wiki/native-transports.html">
     * {@code netty_transport_native_unix}</a>.
     *
     * @return the cause if unavailable. {@code null} if available.
     */
    @Deprecated
    public static Throwable unavailabilityCause() {
        return new UnsupportedOperationException();
    }
    // Static utility holder; never instantiated.
    private Unix() {
    }
}
| doom369/netty | transport-native-unix-common/src/main/java/io/netty/channel/unix/Unix.java | Java | apache-2.0 | 3,285 |
import numpy as np
import xgboost as xgb
import pytest
try:
import shap
except ImportError:
shap = None
pass
pytestmark = pytest.mark.skipif(shap is None, reason="Requires shap package")
# Check integration is not broken from xgboost side
# Changes in binary format may cause problems
def test_with_shap():
    """SHAP contributions plus the expected value must reconstruct the margin."""
    from sklearn.datasets import fetch_california_housing
    features, labels = fetch_california_housing(return_X_y=True)
    dtrain = xgb.DMatrix(features, label=labels)
    booster = xgb.train({"learning_rate": 0.01}, dtrain, 10)
    explainer = shap.TreeExplainer(booster)
    contributions = explainer.shap_values(features)
    margin = booster.predict(dtrain, output_margin=True)
    # Summing contributions over the last axis should recover the raw margin
    # (up to the explainer's expected value), within a loose tolerance.
    reconstructed = np.sum(contributions, axis=len(contributions.shape) - 1)
    assert np.allclose(reconstructed, margin - explainer.expected_value, 1e-3, 1e-3)
| dmlc/xgboost | tests/python/test_with_shap.py | Python | apache-2.0 | 817 |
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.dialogflow.v3.model;
/**
 * The response message for Agents.ListAgents.
 *
 * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
 * transmitted over HTTP when working with the Dialogflow API. For a detailed explanation see:
 * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
 * </p>
 *
 * <p> Generated model class: pure data holder, do not add logic here. </p>
 *
 * @author Google, Inc.
 */
@SuppressWarnings("javadoc")
public final class GoogleCloudDialogflowCxV3ListAgentsResponse extends com.google.api.client.json.GenericJson {
  /**
   * The list of agents. There will be a maximum number of items returned based on the page_size
   * field in the request.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<GoogleCloudDialogflowCxV3Agent> agents;
  static {
    // hack to force ProGuard to consider GoogleCloudDialogflowCxV3Agent used, since otherwise it would be stripped out
    // see https://github.com/google/google-api-java-client/issues/543
    com.google.api.client.util.Data.nullOf(GoogleCloudDialogflowCxV3Agent.class);
  }
  /**
   * Token to retrieve the next page of results, or empty if there are no more results in the list.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String nextPageToken;
  /**
   * The list of agents. There will be a maximum number of items returned based on the page_size
   * field in the request.
   * @return value or {@code null} for none
   */
  public java.util.List<GoogleCloudDialogflowCxV3Agent> getAgents() {
    return agents;
  }
  /**
   * The list of agents. There will be a maximum number of items returned based on the page_size
   * field in the request.
   * @param agents agents or {@code null} for none
   */
  public GoogleCloudDialogflowCxV3ListAgentsResponse setAgents(java.util.List<GoogleCloudDialogflowCxV3Agent> agents) {
    this.agents = agents;
    return this;
  }
  /**
   * Token to retrieve the next page of results, or empty if there are no more results in the list.
   * @return value or {@code null} for none
   */
  public java.lang.String getNextPageToken() {
    return nextPageToken;
  }
  /**
   * Token to retrieve the next page of results, or empty if there are no more results in the list.
   * @param nextPageToken nextPageToken or {@code null} for none
   */
  public GoogleCloudDialogflowCxV3ListAgentsResponse setNextPageToken(java.lang.String nextPageToken) {
    this.nextPageToken = nextPageToken;
    return this;
  }
  // Narrow the return types of GenericJson for fluent chaining.
  @Override
  public GoogleCloudDialogflowCxV3ListAgentsResponse set(String fieldName, Object value) {
    return (GoogleCloudDialogflowCxV3ListAgentsResponse) super.set(fieldName, value);
  }
  @Override
  public GoogleCloudDialogflowCxV3ListAgentsResponse clone() {
    return (GoogleCloudDialogflowCxV3ListAgentsResponse) super.clone();
  }
}
| googleapis/google-api-java-client-services | clients/google-api-services-dialogflow/v3/1.31.0/com/google/api/services/dialogflow/v3/model/GoogleCloudDialogflowCxV3ListAgentsResponse.java | Java | apache-2.0 | 3,689 |
package com.deleidos.framework.monitoring.response;
/**
 * Data-transfer object for the StrAM application "info" REST response.
 * Fields are public and mirror the JSON payload verbatim, so their names follow
 * the wire format rather than local conventions.
 */
public class InfoResponse {

	// Request path template; ${APP_ID} must be substituted by the caller before use.
	public static final String PATH = "/proxy/${APP_ID}/ws/v2/stram/info";

	/** Runtime statistics sub-object of the info payload. */
	public static class Stats {
		public int allocatedContainers;
		public int plannedContainers;
		public int totalVCoresAllocated;
		public int vcoresRequired;
		public int memoryRequired;
		// Fields ending in "PSMA" are presumably per-second moving averages — confirm against the service docs.
		public int tuplesProcessedPSMA;
		public long totalTuplesProcessed;
		public int tuplesEmittedPSMA;
		public long totalTuplesEmitted;
		public int totalMemoryAllocated;
		public int totalBufferServerReadBytesPSMA;
		public int totalBufferServerWriteBytesPSMA;
		public int[] criticalPath;
		public int latency;
		public long windowStartMillis;
		public int numOperators;
		public int failedContainers;
		public long currentWindowId;
		public long recoveryWindowId;
	}

	public String name;
	public String user;
	public long startTime;
	public long elapsedTime;
	public String appPath;
	public String gatewayAddress;
	public boolean gatewayConnected;
	public Object[] appDataSources;
	public Object metrics;
	public Object attributes;
	public String appMasterTrackingUrl;
	public String version;
	public Stats stats;
	public String id;
	//public String state; // Can't get this from this request
}
| deleidos/de-pipeline-tool | de-framework-monitoring/src/main/java/com/deleidos/framework/monitoring/response/InfoResponse.java | Java | apache-2.0 | 1,247 |
using EnsureThat;
using LibGit2Sharp;
using System;
using System.Collections.Generic;
using System.Linq;
namespace GitStats.Console
{
internal class CouplingAnalyser : GitAnalyser
{
    //What files are always checked in with each other?
    // Builds two indexes over the non-merge history (commits per file, files per
    // commit), then reports the files whose companions appeared in more than 75%
    // of their commits (threshold lives in PairStats).
    public void Analyse(string gitPath)
    {
        // FIX: Repository wraps native libgit2 handles (IDisposable) and was never
        // disposed; a using block guarantees deterministic release. using(null) is legal.
        using (var repo = OpenRepository(gitPath))
        {
            if (repo == null) return;
            System.Console.WriteLine("OK");

            //Ignore merge commits
            var commits = repo.Commits.Where(c => !c.Message.ToLowerInvariant().StartsWith("merge"));
            var commitsProcessed = 0;
            var commitsTotal = commits.Count();

            var allCommitsByFile = new Dictionary<string, List<string>>();
            var allFilesByCommit = new Dictionary<string, List<string>>();

            foreach (var commit in commits)
            {
                commitsProcessed++;
                if (commitsProcessed % 100 == 0) { System.Console.WriteLine($"Progress: {commitsProcessed}/{commitsTotal}"); }
                foreach (var parent in commit.Parents)
                {
                    foreach (var change in repo.Diff.Compare<TreeChanges>(parent.Tree, commit.Tree))
                    {
                        if (allCommitsByFile.ContainsKey(change.Path))
                        {
                            allCommitsByFile[change.Path].Add(commit.Sha);
                        }
                        else
                        {
                            allCommitsByFile.Add(change.Path, new List<string>() { commit.Sha });
                        }

                        if (allFilesByCommit.ContainsKey(commit.Sha))
                        {
                            allFilesByCommit[commit.Sha].Add(change.Path);
                        }
                        else
                        {
                            allFilesByCommit.Add(commit.Sha, new List<string>() { change.Path });
                        }
                    }
                }
            }

            System.Console.WriteLine("--- Highly Coupled Files ---");
            //for each file:
            //  count the commits then loop through them
            //  lookup the related files in that commit
            //  track a running total of each file committed at the same time
            //we want related files with the same commit count as the file we're tracking
            // i.e. if a.cs was committed 5 times, and b.cs was in 75%+ of those commits, they're tightly coupled.
            var pairScoresByFile = new Dictionary<string, PairStats>();
            foreach (var allCommitsForAFile in allCommitsByFile)
            {
                var pairStats = new PairStats();
                foreach (var commit in allCommitsForAFile.Value)
                {
                    pairStats.CommitCount++;
                    var pairedFiles = allFilesByCommit[commit].Where(path => path != allCommitsForAFile.Key);
                    foreach (var pairedFile in pairedFiles)
                    {
                        if (pairStats.RelatedFileCounts.ContainsKey(pairedFile))
                        {
                            pairStats.RelatedFileCounts[pairedFile]++;
                        }
                        else
                        {
                            pairStats.RelatedFileCounts.Add(pairedFile, 1);
                        }
                    }
                }
                pairScoresByFile.Add(allCommitsForAFile.Key, pairStats);
            }

            // FIX: the original chained two OrderByDescending calls; the second one
            // re-sorts the entire sequence, so the first key only survived thanks to
            // OrderBy's stability. ThenByDescending states the intent directly and
            // avoids the redundant second full sort. Resulting order is unchanged.
            foreach (var pair in pairScoresByFile.Where(p => p.Value.TightCouplingCount > 0 && p.Value.CommitCount > 4)
                                                 .OrderByDescending(p => p.Value.TightCouplingCount)
                                                 .ThenByDescending(p => p.Value.CommitCount)
                                                 .Take(10))
            {
                System.Console.WriteLine($"File: {pair.Key} | {pair.Value.CommitCount} commits");
                foreach (var pairedFile in pair.Value.TightlyCoupledFiles.OrderByDescending(f => f.CouplingRatio))
                {
                    System.Console.WriteLine($" | {pairedFile.CouplingRatio * 100}% - {pairedFile.FileName}");
                }
            }
        }
    }

    // Returns an open Repository for a valid git path, or null otherwise.
    // Callers own (and must dispose) the returned instance.
    private Repository OpenRepository(string gitPath)
    {
        EnsureArg.IsNotNullOrWhiteSpace(gitPath);
        if (Repository.IsValid(gitPath))
        {
            return new Repository(gitPath);
        };
        return null;
    }
}
internal class PairStats
{
    /// <summary>Number of commits that touched the tracked file.</summary>
    public int CommitCount { get; set; }

    /// <summary>For each companion file, how many of those commits it shared.</summary>
    public Dictionary<string, int> RelatedFileCounts { get; private set; } = new Dictionary<string, int>();

    /// <summary>Number of companion files above the 75% co-commit threshold.</summary>
    public int TightCouplingCount
    {
        get
        {
            return RelatedFileCounts.Count(r => ExceedsCouplingThreshold(r.Value));
        }
    }

    /// <summary>Companion files above the threshold, with their coupling ratio.</summary>
    public IEnumerable<FileCouplingStats> TightlyCoupledFiles
    {
        get
        {
            return RelatedFileCounts.Where(r => ExceedsCouplingThreshold(r.Value))
                                    .Select(r => new FileCouplingStats()
                                    {
                                        FileName = r.Key,
                                        CouplingRatio = (double)r.Value / CommitCount
                                    });
        }
    }

    // True when the companion appeared in more than 75% of this file's commits.
    private bool ExceedsCouplingThreshold(int value)
    {
        // FIX: "value / CommitCount" was integer division, which truncates to 0
        // unless value >= CommitCount — so only 100% coupling ever passed the check,
        // not the intended 75% (see the comment in CouplingAnalyser.Analyse).
        // Cast to double to compare the real ratio, matching CouplingRatio above.
        return 0.75d < (double)value / CommitCount;
    }
}
/// <summary>
/// Result record for one tightly coupled companion file:
/// the fraction of the tracked file's commits it shared, and its path.
/// </summary>
internal class FileCouplingStats
{
    public double CouplingRatio { get; set; }
    public string FileName { get; set; }
}
} | rbanks54/GitStats | GitStats.Console/CouplingAnalyser.cs | C# | apache-2.0 | 5,712 |
package uk.ac.manchester.cs.openphacts.queryexpander.queryLoader;
import java.util.List;
import java.util.Set;
import org.bridgedb.uri.tools.GraphResolver;
import org.bridgedb.utils.Reporter;
import static org.junit.Assert.*;
import org.junit.Test;
import uk.ac.manchester.cs.openphacts.queryexpander.QueryUtils;
import uk.ac.manchester.cs.openphacts.queryexpander.api.QueryExpander;
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
/**
 * Base test verifying that every query supplied by {@link OpsReplacementLoader}
 * expands to its expected text-replaced form. Concrete subclasses are expected
 * to initialise {@link #queryExpander}.
 *
 * NOTE(review): the class name misspells "Replacement"; renaming would break
 * existing subclasses, so it is only documented here.
 *
 * @author Christian
 */
public abstract class OpsReplacemeentTest {

    // Expander under test; must be assigned by the concrete subclass.
    protected QueryExpander queryExpander;

    // Passed where a lens URI parameter is expected but none should be applied.
    private final String NO_LENS = null;

    @Test
    public void testAllNoMapping() throws Exception{
        GraphResolver.addTestMappings();
        QueryCaseLoader loader = new OpsReplacementLoader();
        Set<String> queryKeys = loader.keySet();
        for (String queryKey:queryKeys){
            Reporter.println("Testing " + loader.getQueryName(queryKey));
            String originalQuery = loader.getOriginalQuery(queryKey);
            String targetQuery = loader.getTextReplaceQuery(queryKey);
            List<String> parameters = loader.getParameters(queryKey);
            String inputURI = loader.getInsertURI(queryKey);
            //ystem.out.println(originalQuery);
            //ystem.out.println(parameters);
            String newQuery = queryExpander.expand(originalQuery, parameters, inputURI, NO_LENS, false);
            //System.out.println(newQuery);
            // First comparison runs with the boolean flag false; only on mismatch is it
            // re-run with the flag true inside the assertion (the flag presumably enables
            // diagnostic output — confirm against QueryUtils.sameTupleExpr).
            if (!QueryUtils.sameTupleExpr(targetQuery, newQuery, false, loader.getQueryName(queryKey))){
                assertTrue(QueryUtils.sameTupleExpr(targetQuery, newQuery, true, loader.getQueryName(queryKey)));
            }
        }
    }
}
| openphacts/queryExpander | query.expander.implementation/test/uk/ac/manchester/cs/openphacts/queryexpander/queryLoader/OpsReplacemeentTest.java | Java | apache-2.0 | 1,764 |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInspection.util;
import com.intellij.psi.*;
import com.intellij.psi.controlFlow.DefUseUtil;
import com.intellij.psi.search.searches.ReferencesSearch;
import com.intellij.psi.util.InheritanceUtil;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import com.siyeh.ig.psiutils.ExpressionUtils;
import one.util.streamex.MoreCollectors;
import one.util.streamex.StreamEx;
import org.jetbrains.annotations.Contract;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
* Represents the iterator which traverses the iterable within the loop
*
* @author Tagir Valeev
*/
public class IteratorDeclaration {
  // The local variable holding the iterator, e.g. "it" in "Iterator<T> it = x.iterator();".
  private final @NotNull PsiLocalVariable myIterator;
  // The qualifier of the iterator() call, i.e. the iterable being traversed; may be absent.
  private final @Nullable PsiExpression myIterable;
  // Whether the class declaring iterator() also implements java.util.Collection.
  private final boolean myCollection;

  private IteratorDeclaration(@NotNull PsiLocalVariable iterator, @Nullable PsiExpression iterable, boolean collection) {
    myIterator = iterator;
    myIterable = iterable;
    myCollection = collection;
  }

  @NotNull
  public PsiLocalVariable getIterator() {
    return myIterator;
  }

  @Nullable
  public PsiExpression getIterable() {
    return myIterable;
  }

  public boolean isCollection() {
    return myCollection;
  }

  /** Returns true if the condition is an {@code it.hasNext()} call on this iterator. */
  public boolean isHasNextCall(PsiExpression condition) {
    return isIteratorMethodCall(condition, "hasNext");
  }

  /**
   * Returns the single reference to the iterator inside {@code parent}, or null if
   * there is no reference or more than one.
   */
  @Nullable
  public PsiElement findOnlyIteratorRef(PsiExpression parent) {
    // Resolve the code block owning the iterator variable so def-use analysis can run on it.
    PsiElement element = PsiUtil.getVariableCodeBlock(myIterator, null);
    PsiCodeBlock block =
      element instanceof PsiCodeBlock ? (PsiCodeBlock)element : PsiTreeUtil.getParentOfType(element, PsiCodeBlock.class);
    if (block == null) return null;
    // Of all reads of the iterator's initial value, keep only those under `parent`
    // and require exactly one (onlyOne() yields empty otherwise).
    return StreamEx.of(DefUseUtil.getRefs(block, myIterator, myIterator.getInitializer()))
      .filter(e -> PsiTreeUtil.isAncestor(parent, e, false))
      .collect(MoreCollectors.onlyOne()).orElse(null);
  }

  /** Returns true if {@code candidate} is a no-arg call {@code it.<method>()} on this iterator. */
  public boolean isIteratorMethodCall(PsiElement candidate, String method) {
    if (!(candidate instanceof PsiMethodCallExpression)) return false;
    PsiMethodCallExpression call = (PsiMethodCallExpression)candidate;
    if (call.getArgumentList().getExpressions().length != 0) return false;
    PsiReferenceExpression expression = call.getMethodExpression();
    return method.equals(expression.getReferenceName()) && ExpressionUtils.isReferenceTo(expression.getQualifierExpression(), myIterator);
  }

  /**
   * Matches a statement of the form {@code T var = it.next();} and returns the
   * declared variable, or null if the statement has a different shape.
   */
  public PsiVariable getNextElementVariable(PsiStatement statement) {
    if (!(statement instanceof PsiDeclarationStatement)) return null;
    PsiDeclarationStatement declaration = (PsiDeclarationStatement)statement;
    if (declaration.getDeclaredElements().length != 1) return null;
    PsiElement element = declaration.getDeclaredElements()[0];
    if (!(element instanceof PsiLocalVariable)) return null;
    PsiLocalVariable var = (PsiLocalVariable)element;
    if (!isIteratorMethodCall(var.getInitializer(), "next")) return null;
    return var;
  }

  /**
   * Matches a statement of the form {@code Iterator<T> it = iterable.iterator();}
   * and builds an IteratorDeclaration from it; returns null otherwise.
   */
  @Contract("null -> null")
  private static IteratorDeclaration extract(PsiStatement statement) {
    if (!(statement instanceof PsiDeclarationStatement)) return null;
    PsiDeclarationStatement declaration = (PsiDeclarationStatement)statement;
    if (declaration.getDeclaredElements().length != 1) return null;
    PsiElement element = declaration.getDeclaredElements()[0];
    if (!(element instanceof PsiLocalVariable)) return null;
    PsiLocalVariable variable = (PsiLocalVariable)element;
    PsiExpression initializer = variable.getInitializer();
    if (!(initializer instanceof PsiMethodCallExpression)) return null;
    PsiMethodCallExpression call = (PsiMethodCallExpression)initializer;
    if (call.getArgumentList().getExpressions().length != 0) return null;
    PsiReferenceExpression methodExpression = call.getMethodExpression();
    if (!"iterator".equals(methodExpression.getReferenceName())) return null;
    // The iterator() call must come from a java.lang.Iterable implementor;
    // also record whether that implementor is a java.util.Collection.
    PsiMethod method = call.resolveMethod();
    if (method == null || !InheritanceUtil.isInheritor(method.getContainingClass(), CommonClassNames.JAVA_LANG_ITERABLE)) return null;
    boolean isCollection = InheritanceUtil.isInheritor(method.getContainingClass(), CommonClassNames.JAVA_UTIL_COLLECTION);
    // The declared variable itself must have the raw type java.util.Iterator.
    PsiType type = variable.getType();
    if (!(type instanceof PsiClassType) || !((PsiClassType)type).rawType().equalsToText(CommonClassNames.JAVA_UTIL_ITERATOR)) return null;
    return new IteratorDeclaration(variable, methodExpression.getQualifierExpression(), isCollection);
  }

  @Nullable
  private static IteratorDeclaration fromForLoop(PsiForStatement statement) {
    // Only the pattern for(It it = x.iterator(); it.hasNext();) — no update part allowed.
    if (statement.getUpdate() != null) return null;
    PsiStatement initialization = statement.getInitialization();
    IteratorDeclaration declaration = extract(initialization);
    if (declaration == null || !declaration.isHasNextCall(statement.getCondition())) return null;
    return declaration;
  }

  @Nullable
  private static IteratorDeclaration fromWhileLoop(PsiWhileStatement statement) {
    // The iterator must be declared in the statement immediately preceding the loop
    // (ignoring comments and whitespace).
    PsiElement previous = PsiTreeUtil.skipSiblingsBackward(statement, PsiComment.class, PsiWhiteSpace.class);
    if (!(previous instanceof PsiDeclarationStatement)) return null;
    IteratorDeclaration declaration = extract((PsiStatement)previous);
    if (declaration == null || !declaration.isHasNextCall(statement.getCondition())) return null;
    // Reject if the iterator is referenced anywhere outside the loop body: the search
    // processor returns false for such a reference, which makes forEach yield false.
    if (!ReferencesSearch.search(declaration.myIterator, declaration.myIterator.getUseScope()).forEach(ref -> {
      return PsiTreeUtil.isAncestor(statement, ref.getElement(), true);
    })) {
      return null;
    }
    return declaration;
  }

  /**
   * Creates {@code IteratorDeclaration} if the loop follows one of these patterns:
   *
   * <pre>{@code
   * Iterator<T> it = iterable.iterator();
   * while(it.hasNext()) {
   *   ...
   * }
   * // And iterator is not reused after the loop
   * }</pre>
   *
   * or
   *
   * <pre>{@code
   * for(Iterator<T> it = iterable.iterator();it.hasNext();) {
   *   ...
   * }
   * }</pre>
   *
   * @param statement loop to create the {@code IteratorDeclaration} from
   * @return created IteratorDeclaration or null if the loop pattern is not recognized.
   */
  @Contract("null -> null")
  public static IteratorDeclaration fromLoop(PsiLoopStatement statement) {
    if(statement instanceof PsiWhileStatement) {
      return fromWhileLoop((PsiWhileStatement)statement);
    }
    if(statement instanceof PsiForStatement) {
      return fromForLoop((PsiForStatement)statement);
    }
    return null;
  }
}
| youdonghai/intellij-community | java/java-impl/src/com/intellij/codeInspection/util/IteratorDeclaration.java | Java | apache-2.0 | 7,121 |
package com.xidian.yetwish.reading.ui;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import com.xidian.yetwish.reading.R;
import com.xidian.yetwish.reading.framework.utils.SharedPreferencesUtils;
import com.xidian.yetwish.reading.ui.main.ReadingActivity;
/**
* splash activity
* Created by Yetwish on 2016/4/8 0008.
*/
/**
 * splash activity: shows the splash layout, then (when the EXTRA_SPLASH flag is
 * true) forwards to {@link ReadingActivity} after a one-second delay and finishes.
 * Created by Yetwish on 2016/4/8 0008.
 */
public class SplashActivity extends BaseActivity {

    /** Message id that triggers navigation to the main activity. */
    private static final int MSG_SPLASH = 0x01;

    /** How long the splash screen stays visible, in milliseconds. */
    private static final long SPLASH_DELAY_MS = 1000L;

    // NOTE(review): a non-static Handler holds an implicit reference to the activity;
    // pending messages are removed in onDestroy() below so the activity cannot leak
    // or be navigated after it is finished.
    private Handler mHandler = new Handler(){
        @Override
        public void handleMessage(Message msg) {
            super.handleMessage(msg);
            switch (msg.what){
                case MSG_SPLASH:
                    ReadingActivity.startActivity(SplashActivity.this);
                    finish();
                    break;
            }
        }
    };

    /**
     * Launches this activity.
     *
     * @param splash whether the one-second splash delay should be shown
     */
    public static void startActivity(Context context,boolean splash){
        Intent intent = new Intent(context,SplashActivity.class);
        intent.putExtra(SharedPreferencesUtils.EXTRA_SPLASH,splash);
        context.startActivity(intent);
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_splash);
        drawStatusBar();
        boolean splash = getIntent().getBooleanExtra(SharedPreferencesUtils.EXTRA_SPLASH,true);
        if(splash){
            // FIX: a delayed message replaces the original worker thread that merely
            // slept for one second: same timing, no extra thread, and no silently
            // swallowed InterruptedException.
            mHandler.sendEmptyMessageDelayed(MSG_SPLASH, SPLASH_DELAY_MS);
        }
    }

    @Override
    protected void onDestroy() {
        // Drop any pending navigation message so a destroyed activity is not
        // restarted and the Handler does not keep the activity alive.
        mHandler.removeMessages(MSG_SPLASH);
        super.onDestroy();
    }
}
| yetwish/Reading | reading/src/main/java/com/xidian/yetwish/reading/ui/SplashActivity.java | Java | apache-2.0 | 1,907 |
/* Web Polygraph http://www.web-polygraph.org/
* Copyright 2003-2014 The Measurement Factory
* Licensed under the Apache License, Version 2.0 */
#include "base/polygraph.h"
#include "xstd/h/iostream.h"
#include "xstd/h/string.h"
#include "xstd/CpuAffinitySet.h"
static std::ostream &operator <<(std::ostream &os, const cpu_set_t &cpuSet);
// Starts with an empty CPU set; cores are added via reset(coreId, true).
CpuAffinitySet::CpuAffinitySet() {
	CPU_ZERO(&raw);
}
// Applies the configured affinity mask to the calling process.
// Returns false (writing a diagnostic to err) when the current affinity cannot
// be read or the new one cannot be installed. A mismatch between the requested
// and resulting affinity is reported on err but treated as a warning (true).
bool CpuAffinitySet::apply(ostream &err) {
	Must(CPU_COUNT(&raw) > 0); // apply() requires a non-empty configured set

	cpu_set_t origCpuSet;
	CPU_ZERO(&origCpuSet);
	if (sched_getaffinity(0, sizeof(origCpuSet), &origCpuSet) != 0) {
		err << "failed to get original CPU affinity: " << Error::Last();
		return false;
	}

	// CPU_AND to be able to distinguish EINVAL due to external restrictions
	// from other EINVAL errors after calling sched_setaffinity().
	cpu_set_t allowedCpuSet;
	memcpy(&allowedCpuSet, &raw, sizeof(allowedCpuSet));
	CPU_AND(&allowedCpuSet, &allowedCpuSet, &origCpuSet);
	if (CPU_COUNT(&allowedCpuSet) <= 0) {
		err << "requested CPU affinity is incompatible with preexisting restrictions" << std::endl <<
			"requested affinity: " << raw << std::endl <<
			"existing affinity: " << origCpuSet;
		return false;
	}

	if (sched_setaffinity(0, sizeof(allowedCpuSet), &allowedCpuSet) != 0) {
		err << "failed to set CPU affinity: " << Error::Last() << std::endl <<
			"CPU affinity (after adjustments) was: " << allowedCpuSet;
		return false;
	}

	// Re-read the affinity to verify the kernel accepted exactly what we asked for.
	cpu_set_t resultingCpuSet;
	CPU_ZERO(&resultingCpuSet);
	if (sched_getaffinity(0, sizeof(resultingCpuSet), &resultingCpuSet) != 0) {
		// FIX: the original message lacked the ": " separator before the errno text,
		// unlike every other diagnostic in this function.
		err << "failed to get changed CPU affinity: " << Error::Last();
		return false;
	}

	if (!CPU_EQUAL(&raw, &resultingCpuSet)) {
		err << "resulting/set CPU affinity: " << resultingCpuSet << std::endl <<
			"differs from the configured CPU affinity: " << raw;
		return true; // treat this as a warning, not error
	}

	return true;
}
// Adds (turnOn == true) or removes (turnOn == false) a single core id
// from the configured affinity mask. Takes effect only after apply().
void CpuAffinitySet::reset(const int coreId, const bool turnOn) {
	if (turnOn)
		CPU_SET(coreId, &raw);
	else
		CPU_CLR(coreId, &raw);
}
// Prints the configured mask as a bracketed core-id list, e.g. "[0,2]".
std::ostream &CpuAffinitySet::print(std::ostream &os) const {
	return os << raw;
}
// Formats a cpu_set_t as a bracketed, comma-separated list of the set core
// ids, e.g. "[0,2,5]"; an empty set prints as "[]".
static
std::ostream &operator <<(std::ostream &os, const cpu_set_t &cpuSet) {
	os << '[';
	const int total = CPU_COUNT(&cpuSet);
	int seen = 0;
	// Stop scanning as soon as every set bit has been printed.
	for (int id = 0; id < CPU_SETSIZE && seen < total; ++id) {
		if (!CPU_ISSET(id, &cpuSet))
			continue;
		if (seen++ > 0)
			os << ','; // separator before every id except the first
		os << id;
	}
	os << ']';
	return os;
}
| albertok/web-polygraph | src/xstd/CpuAffinitySet.cc | C++ | apache-2.0 | 2,439 |
package fr.javatronic.blog.massive.annotation1.sub1;
import fr.javatronic.blog.processor.Annotation_001;
@Annotation_001
// Empty numbered placeholder class; presumably generated to stress annotation
// processing with a massive count of annotated classes — no behavior of its own.
public class Class_5188 {
}
| lesaint/experimenting-annotation-processing | experimenting-rounds/massive-count-of-annotated-classes/src/main/java/fr/javatronic/blog/massive/annotation1/sub1/Class_5188.java | Java | apache-2.0 | 151 |
/*
* @license Apache-2.0
*
* Copyright (c) 2019 The Stdlib Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Compile-time (dtslint) test for the exported constant's declared type.
import FLOAT64_MAX_SAFE_NTH_FIBONACCI = require( './index' );


// TESTS //

// The export is a number...
{
	// tslint:disable-next-line:no-unused-expression
	FLOAT64_MAX_SAFE_NTH_FIBONACCI; // $ExpectType number
}
| stdlib-js/stdlib | lib/node_modules/@stdlib/constants/float64/max-safe-nth-fibonacci/docs/types/test.ts | TypeScript | apache-2.0 | 832 |
/*
* #%L
* %%
* Copyright (C) 2011 - 2017 BMW Car IT GmbH
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
define("joynr/messaging/inprocess/InProcessMessagingSkeleton", [], function() {

    /**
     * In-process messaging skeleton: forwards each incoming JoynrMessage to the
     * single listener registered via registerListener.
     *
     * @name InProcessMessagingSkeleton
     * @constructor
     */
    function InProcessMessagingSkeleton() {
        // Listener callback kept private via closure; set by registerListener.
        var listener;

        /**
         * Forwards the given message to the registered listener.
         *
         * @name InProcessMessagingSkeleton#receiveMessage
         * @function
         *
         * @param {JoynrMessage} joynrMessage
         * @returns {Object} A+ promise object
         */
        this.receiveMessage = function(joynrMessage) {
            return listener(joynrMessage);
        };

        /**
         * A setter for the callback function that will receive the incoming messages
         *
         * @name InProcessMessagingSkeleton#registerListener
         * @function
         *
         * @param {Function} newListener the function that is called with the incoming JoynrMessage
         */
        this.registerListener = function(newListener) {
            listener = newListener;
        };
    }

    return InProcessMessagingSkeleton;
});
/*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.openehealth.ipf.platform.camel.core.management;
import javax.management.MalformedObjectNameException;
import javax.management.ObjectName;
import org.apache.camel.CamelContext;
import org.apache.camel.Processor;
import org.apache.camel.management.DefaultManagementNamingStrategy;
import org.apache.camel.model.ProcessorDefinition;
import org.apache.camel.model.ProcessorDefinitionHelper;
import org.apache.camel.model.RouteDefinition;
/**
* @author Reinhard Luft
*/
/**
 * Naming strategy that extends Camel's {@link DefaultManagementNamingStrategy}
 * by embedding the owning route id into each processor's JMX {@link ObjectName}.
 *
 * @author Reinhard Luft
 */
public class ProcessorManagementNamingStrategy extends
        DefaultManagementNamingStrategy {

    public static final String KEY_ROUTE = "route";

    public ObjectName getObjectNameForProcessor(CamelContext context,
            Processor processor, ProcessorDefinition<?> definition)
            throws MalformedObjectNameException {
        StringBuilder name = new StringBuilder();
        name.append(domainName).append(":");
        name.append(KEY_CONTEXT).append("=").append(getContextId(context)).append(",");
        name.append(KEY_TYPE).append("=").append(TYPE_PROCESSOR).append(",");

        // Only processors that belong to a route get the extra route key.
        RouteDefinition route = ProcessorDefinitionHelper.getRoute(definition);
        if (route != null) {
            name.append(KEY_ROUTE).append("=").append(route.getId()).append(",");
        }

        // Quote the id: processor ids may contain characters reserved by JMX.
        name.append(KEY_NAME).append("=").append(ObjectName.quote(definition.getId()));
        return createObjectName(name);
    }
}
| krasserm/ipf | platform-camel/core/src/main/java/org/openehealth/ipf/platform/camel/core/management/ProcessorManagementNamingStrategy.java | Java | apache-2.0 | 2,034 |
package com.buddycloud.utils;
import android.app.Activity;
import android.content.Context;
import android.view.View;
import android.view.inputmethod.InputMethodManager;
public class InputUtils {

	/**
	 * Hides the soft keyboard for the given activity.
	 *
	 * FIX: {@code Activity.getCurrentFocus()} may return null (no focused view),
	 * and the original code dereferenced it unconditionally, throwing a
	 * NullPointerException; now the call is a no-op in that case.
	 *
	 * @param activity the activity whose keyboard should be hidden
	 */
	public static void hideKeyboard(Activity activity) {
		InputMethodManager imm = (InputMethodManager) activity.getSystemService(Context.INPUT_METHOD_SERVICE);
		View currentFocus = activity.getCurrentFocus();
		if (currentFocus != null) {
			imm.hideSoftInputFromWindow(currentFocus.getWindowToken(), 0);
		}
	}

	/**
	 * @param activity used to obtain the input method service
	 * @return whether any input method is currently active
	 */
	public static boolean isActive(Activity activity) {
		InputMethodManager imm = (InputMethodManager) activity.getSystemService(Context.INPUT_METHOD_SERVICE);
		return imm.isActive();
	}
}
| buddycloud/buddycloud-android | src/com/buddycloud/utils/InputUtils.java | Java | apache-2.0 | 620 |
/**
 * Cached pattern matching every percent-encoded forward slash ('%2F')
 * @type {RegExp}
 */
const encodedSlashPattern = new RegExp(encodeURIComponent('/'), 'g');

/**
 * Percent-encodes each occurrence of / in the given urn
 * @param {string} urn
 * @return {string}
 */
export const encodeForwardSlash = (urn: string): string => urn.replace(/\//g, encodeURIComponent('/'));

/**
 * Restores each percent-encoded slash in the given urn to /
 * @param {string} urn
 * @return {string}
 */
export const decodeForwardSlash = (urn: string): string => urn.replace(encodedSlashPattern, decodeURIComponent('/'));

/**
 * Encodes a urn string for safe embedding by escaping forward slashes
 * @param {string} urn
 * @return {string}
 */
export const encodeUrn = (urn: string): string => encodeForwardSlash(urn);

/**
 * Inverse of encodeUrn: restores escaped forward slashes
 * @param {string} urn
 * @return {string}
 */
export const decodeUrn = (urn: string): string => decodeForwardSlash(urn);

/**
 * Percent-encoded form of the asterisk/wildcard symbol; encodeURIComponent
 * does not treat * as reserved, so it is spelled out here
 * @type {string}
 */
const encodedWildcard = '%2A';

/**
 * Cached pattern matching every encoded wildcard
 * @type {RegExp}
 */
const encodedWildcardPattern = new RegExp(encodedWildcard, 'g');

/**
 * Replaces each literal * with its encoded form
 * @param {string} urn
 * @return {string}
 */
export const encodeWildcard = (urn: string): string => urn.replace(/\*/g, () => encodedWildcard);

/**
 * Replaces each encoded wildcard with a literal *
 * @param {string} urn
 * @return {string}
 */
export const decodeWildcard = (urn: string): string => urn.replace(encodedWildcardPattern, decodeURIComponent('*'));

/**
 * Extracts the entity type from a urn, i.e. the third colon-delimited segment
 * of "urn:li:<entityType>..."; undefined when the urn has fewer segments
 * @param urn
 */
export const extractEntityType = (urn: string): string | undefined => {
  const segments = urn.split(':');
  return segments[2];
};
| mars-lan/WhereHows | datahub-web/@datahub/utils/addon/validators/urn.ts | TypeScript | apache-2.0 | 1,847 |
"""Locate shared test data and load data/sample.py as the ``sample`` module."""
import pathlib
import importlib
import importlib.util
import sys

__all__ = ['sample', 'sampleTxt', 'sampleBin']

# Test-data directory relative to this file: <test root>/data
this = pathlib.Path(__file__)
datadir = this.parent.parent / 'data'

# FIX: SourceFileLoader.load_module() has been deprecated since Python 3.4 and
# was removed in 3.12; use the spec-based loading API instead.
_spec = importlib.util.spec_from_file_location('sample', str(datadir / 'sample.py'))
sample = importlib.util.module_from_spec(_spec)
# load_module() also registered the module globally; keep that behavior.
sys.modules['sample'] = sample
_spec.loader.exec_module(sample)

sampleTxt = datadir / 'sample.txt'
sampleBin = datadir / 'sample.bin'
| viridia/coda | test/python/finddata.py | Python | apache-2.0 | 345 |
/*******************************************************************************
* # Copyright 2015 InfinitiesSoft Solutions Inc.
* #
* # Licensed under the Apache License, Version 2.0 (the "License"); you may
* # not use this file except in compliance with the License. You may obtain
* # a copy of the License at
* #
* # http://www.apache.org/licenses/LICENSE-2.0
* #
* # Unless required by applicable law or agreed to in writing, software
* # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* # License for the specific language governing permissions and limitations
* # under the License.
*******************************************************************************/
package com.infinities.keystone4j.contrib.revoke.driver;
import java.util.Calendar;
import java.util.List;
import com.infinities.keystone4j.contrib.revoke.model.RevokeEvent;
/**
 * Storage-driver contract for the token-revocation backend.
 */
public interface RevokeDriver {

	/**
	 * Returns the revocation events recorded since the given fetch time.
	 *
	 * @param lastFetch lower time bound; the original inline note
	 *            ("lastFetch=null") indicates {@code null} is accepted and
	 *            presumably returns all events — confirm against implementations.
	 * @return matching revocation events
	 */
	// lastFetch=null
	List<RevokeEvent> getEvents(Calendar lastFetch);

	/**
	 * Records the given revocation event.
	 *
	 * @param event the event to persist
	 */
	void revoke(RevokeEvent event);
}
| infinitiessoft/keystone4j | keystone4j-core/src/main/java/com/infinities/keystone4j/contrib/revoke/driver/RevokeDriver.java | Java | apache-2.0 | 1,112 |
package com.flying.promotion.javatuning.future.jdk;
import java.util.concurrent.Callable;
/**
* Created by Joseph on 7/25/2016.
*/
/**
 * Callable that simulates slow production of data: repeats {@code para} ten
 * times, pausing 100 ms between appends (roughly one second total).
 * Created by Joseph on 7/25/2016.
 */
public class RealData implements Callable<String>{

    // Immutable after construction; marked final accordingly.
    private final String para;

    public RealData(String para){
        this.para=para;
    }

    @Override
    public String call() throws Exception {
        // FIX: StringBuilder instead of StringBuffer — the buffer is method-local,
        // so StringBuffer's synchronization was pure overhead.
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < 10; i++) {
            sb.append(para);
            try {
                Thread.sleep(100);
            } catch (InterruptedException e) {
                // FIX: do not swallow the interrupt — restore the flag so callers
                // (e.g. an executor cancelling this task) can observe it.
                Thread.currentThread().interrupt();
            }
        }
        return sb.toString();
    }
}
| wangshijun101/JavaSenior | CoreJava/src/main/java/com/flying/promotion/javatuning/future/jdk/RealData.java | Java | apache-2.0 | 610 |
/**
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* Copyright 2012-2017 the original author or authors.
*/
package org.assertj.core.util.introspection;
import static java.lang.String.format;
import static java.util.Collections.emptyList;
import static java.util.Collections.unmodifiableList;
import static org.assertj.core.util.IterableUtil.isNullOrEmpty;
import static org.assertj.core.util.Preconditions.checkArgument;
import static org.assertj.core.util.introspection.Introspection.getPropertyGetter;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;
import org.assertj.core.util.VisibleForTesting;
/**
* Utility methods for properties access.
*
* @author Joel Costigliola
* @author Alex Ruiz
* @author Nicolas François
* @author Florent Biville
*/
public class PropertySupport {
private static final String SEPARATOR = ".";
private static final PropertySupport INSTANCE = new PropertySupport();
/**
* Returns the singleton instance of this class.
*
* @return the singleton instance of this class.
*/
public static PropertySupport instance() {
return INSTANCE;
}
  // Package-private on purpose: production code should use instance();
  // the constructor is exposed only for tests.
  @VisibleForTesting
  PropertySupport() {
  }
  /**
   * Returns a <code>{@link List}</code> containing the values of the given property name, from the elements of the
   * given <code>{@link Iterable}</code>. If the given {@code Iterable} is empty or {@code null}, this method will
   * return an empty {@code List}. This method supports nested properties (e.g. "address.street.number").
   *
   * @param propertyName the name of the property. It may be a nested property. It is left to the clients to validate
   *          for {@code null} or empty.
   * @param target the given {@code Iterable}.
   * @return an {@code Iterable} containing the values of the given property name, from the elements of the given
   *         {@code Iterable}.
   * @throws IntrospectionError if an element in the given {@code Iterable} does not have a property with a matching
   *           name.
   */
  public <T> List<T> propertyValues(String propertyName, Class<T> clazz, Iterable<?> target) {
    if (isNullOrEmpty(target)) {
      return emptyList();
    }
    if (isNestedProperty(propertyName)) {
      // Resolve the head of the chain ("address" in "address.street") for every element,
      // then recurse on the remaining tail ("street") over those intermediate values.
      String firstPropertyName = popPropertyNameFrom(propertyName);
      Iterable<Object> propertyValues = propertyValues(firstPropertyName, Object.class, target);
      // extract next sub-property values until reaching the last sub-property
      return propertyValues(nextPropertyNameFrom(propertyName), clazz, propertyValues);
    }
    return simplePropertyValues(propertyName, clazz, target);
  }
  /**
   * Static variant of {@link #propertyValueOf(String, Class, Object)} for syntactic sugar.
   * <p>
   *
   * @param propertyName the name of the property. It may be a nested property. It is left to the clients to validate
   *          for {@code null} or empty.
   * @param target the given object
   * @param clazz type of property
   * @return a the values of the given property name
   * @throws IntrospectionError if the given target does not have a property with a matching name.
   */
  public static <T> T propertyValueOf(String propertyName, Object target, Class<T> clazz) {
    // Note the argument order differs from the instance method being delegated to:
    // (name, target, clazz) here versus (name, clazz, target) on the instance.
    return instance().propertyValueOf(propertyName, clazz, target);
  }
/**
 * Extracts the value of a non-nested property from each element of the given {@code Iterable}.
 * <p>
 * {@code null} elements are mapped to {@code null} values instead of triggering introspection.
 *
 * @param propertyName the name of the simple (non-nested) property.
 * @param clazz the expected type of the property values.
 * @param target the elements to extract the property from.
 * @return an unmodifiable {@code List} of the extracted values, in iteration order.
 */
private <T> List<T> simplePropertyValues(String propertyName, Class<T> clazz, Iterable<?> target) {
  List<T> values = new ArrayList<>();
  for (Object element : target) {
    T value = element == null ? null : propertyValue(propertyName, clazz, element);
    values.add(value);
  }
  return unmodifiableList(values);
}
/**
 * Returns the first segment of a nested property chain, e.g. "address" for "address.street".
 * A non-nested name is returned unchanged.
 *
 * @param propertyNameChain the (possibly nested) property name.
 * @return the leading property name segment.
 */
private String popPropertyNameFrom(String propertyNameChain) {
  if (isNestedProperty(propertyNameChain)) {
    int separatorIndex = propertyNameChain.indexOf(SEPARATOR);
    return propertyNameChain.substring(0, separatorIndex);
  }
  return propertyNameChain;
}
/**
 * Returns everything after the first separator of a nested property chain,
 * e.g. "street.number" for "address.street.number". Returns the empty string
 * for a non-nested name.
 *
 * @param propertyNameChain the (possibly nested) property name.
 * @return the remainder of the chain after the first segment, or "".
 */
private String nextPropertyNameFrom(String propertyNameChain) {
  if (isNestedProperty(propertyNameChain)) {
    int separatorIndex = propertyNameChain.indexOf(SEPARATOR);
    return propertyNameChain.substring(separatorIndex + 1);
  }
  return "";
}
/**
 * Tells whether the given property name denotes a nested property, i.e. it contains the
 * separator somewhere other than the first or last position.
 *
 * <pre><code class='java'> isNestedProperty("address.street"); // true
 * isNestedProperty("address.street.name"); // true
 * isNestedProperty("person"); // false
 * isNestedProperty(".name"); // false
 * isNestedProperty("person."); // false
 * isNestedProperty("person.name."); // false
 * isNestedProperty(".person.name"); // false
 * isNestedProperty("."); // false
 * isNestedProperty(""); // false</code></pre>
 */
private boolean isNestedProperty(String propertyName) {
  if (!propertyName.contains(SEPARATOR)) return false;
  if (propertyName.startsWith(SEPARATOR)) return false;
  return !propertyName.endsWith(SEPARATOR);
}
/**
 * Return the value of a simple property from a target object.
 * <p>
 * This only works for simple properties; nested properties are not supported! Use
 * {@link #propertyValueOf(String, Class, Object)} instead.
 *
 * @param propertyName the name of the simple property. It is left to the clients to validate
 *          for {@code null} or empty.
 * @param target the given object
 * @param clazz type of property
 * @return the value of the given property name
 * @throws IntrospectionError if the given target does not have a property with a matching name,
 *           or if the getter invocation fails.
 */
@SuppressWarnings("unchecked")
public <T> T propertyValue(String propertyName, Class<T> clazz, Object target) {
  Method getter = getPropertyGetter(propertyName, target);
  try {
    return (T) getter.invoke(target);
  } catch (ClassCastException e) {
    // NOTE(review): due to type erasure the (T) cast above cannot throw here
    // itself; presumably this guards against a ClassCastException raised
    // inside the invoked getter -- confirm.
    String msg = format("Unable to obtain the value of the property <'%s'> from <%s> - wrong property type specified <%s>",
      propertyName, target, clazz);
    throw new IntrospectionError(msg, e);
  } catch (Exception unexpected) {
    String msg = format("Unable to obtain the value of the property <'%s'> from <%s>", propertyName, target);
    throw new IntrospectionError(msg, unexpected);
  }
}
/**
 * Returns the value of the given property name given target. If the given object is {@code null}, this method will
 * return null.<br>
 * This method supports nested properties (e.g. "address.street.number").
 *
 * @param propertyName the name of the property. It may be a nested property. It is left to the clients to validate
 *          for {@code null} or empty.
 * @param clazz the class of property.
 * @param target the given Object to extract property from.
 * @return the value of the given property name given target.
 * @throws IntrospectionError if target object does not have a property with a matching name.
 * @throws IllegalArgumentException if propertyName is null.
 */
public <T> T propertyValueOf(String propertyName, Class<T> clazz, Object target) {
  checkArgument(propertyName != null, "the property name should not be null.");
  // returns null if target is null as we can't extract a property from a null object
  // but don't want to raise an exception if we were looking at a nested property
  if (target == null) return null;
  if (isNestedProperty(propertyName)) {
    // e.g. "address.street.number": get the "address" value first...
    String firstPropertyName = popPropertyNameFrom(propertyName);
    Object propertyValue = propertyValue(firstPropertyName, Object.class, target);
    // ...then recurse on "street.number" with that value as the new target,
    // until reaching the last sub-property.
    return propertyValueOf(nextPropertyNameFrom(propertyName), clazz, propertyValue);
  }
  return propertyValue(propertyName, clazz, target);
}
/**
 * Just delegates to {@link #propertyValues(String, Class, Iterable)} with {@code Class} being {@code Object.class}.
 *
 * @param fieldOrPropertyName the (possibly nested) property name.
 * @param objects the elements to extract the property values from.
 * @return the extracted property values as {@code Object}s.
 */
public List<Object> propertyValues(String fieldOrPropertyName, Iterable<?> objects) {
  return propertyValues(fieldOrPropertyName, Object.class, objects);
}
/**
 * Tells whether a public getter exists for the given field name on the given object.
 *
 * @param fieldName the name of the field to look a getter for.
 * @param actual the object to introspect.
 * @return true if a public getter exists, false otherwise.
 */
public boolean publicGetterExistsFor(String fieldName, Object actual) {
  try {
    getPropertyGetter(fieldName, actual);
    return true;
  } catch (IntrospectionError e) {
    return false;
  }
}
}
| bric3/assertj-core | src/main/java/org/assertj/core/util/introspection/PropertySupport.java | Java | apache-2.0 | 8,633 |
package com.ciandt.techgallery.servlets;
import java.io.IOException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
@SuppressWarnings("serial")
public class ViewTech extends HttpServlet {

  /**
   * Redirects GET requests to the static viewTech.html page, preserving any
   * query string present on the original request.
   *
   * @param req the incoming request; its query string (if any) is forwarded.
   * @param resp the response used to issue the redirect.
   * @throws IOException if the redirect cannot be sent.
   */
  @Override
  public void doGet(HttpServletRequest req, HttpServletResponse resp) throws IOException {
    String urlPage = "/viewTech.html";
    // getQueryString() returns null (not "") when there is no query string,
    // so guard against NPE before calling isEmpty().
    String queryString = req.getQueryString();
    if (queryString != null && !queryString.isEmpty()) {
      urlPage += "?" + queryString;
    }
    resp.setContentType("text/html");
    resp.sendRedirect(urlPage);
  }
}
| tuliobraga/tech-gallery | src/main/java/com/ciandt/techgallery/servlets/ViewTech.java | Java | apache-2.0 | 587 |
/*
* Copyright 2012-2013 inBloom, Inc. and its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.slc.sli.test.edfi.entities.meta;
import java.util.List;
/**
 * Metadata describing a gradebook entry for the test-data generation tools:
 * the section and grading period it belongs to, its type, the date it was
 * assigned and the learning objectives it assesses.
 */
public class GradeBookEntryMeta {
  // Unique identifier of this gradebook entry.
  String id;
  // Ids of the learning objectives this entry assesses.
  List<String> learningObjectiveIds;
  // Grading period this entry falls into.
  GradingPeriodMeta gradingPeriod;
  // Section (class) the entry belongs to.
  SectionMeta section;
  // Kind of entry (e.g. homework, exam).
  String gradebookEntryType;
  // Date the work was assigned.
  String dateAssigned;
  public void setLearningObjectiveIds(List<String> learningObjectiveIds) {
    this.learningObjectiveIds = learningObjectiveIds;
  }
  public List<String> getLearningObjectiveIds() {
    return learningObjectiveIds;
  }
  public String getId() {
    return id;
  }
  public void setId(String id) {
    this.id = id;
  }
  public GradingPeriodMeta getGradingPeriod() {
    return gradingPeriod;
  }
  public void setGradingPeriod(GradingPeriodMeta gradingPeriod) {
    this.gradingPeriod = gradingPeriod;
  }
  public SectionMeta getSection() {
    return section;
  }
  public void setSection(SectionMeta section) {
    this.section = section;
  }
  public String getGradebookEntryType() {
    return gradebookEntryType;
  }
  public void setGradebookEntryType(String gradebookEntryType) {
    this.gradebookEntryType = gradebookEntryType;
  }
  public String getDateAssigned() {
    return dateAssigned;
  }
  public void setDateAssigned(String dateAssigned) {
    this.dateAssigned = dateAssigned;
  }
}
| inbloom/secure-data-service | tools/data-tools/src/org/slc/sli/test/edfi/entities/meta/GradeBookEntryMeta.java | Java | apache-2.0 | 2,017 |
using Motomatic.Source.Automating;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.RegularExpressions;
namespace Motomatic.Source.Storage
{
/// <summary>
/// A named AutoHotkey script loaded from disk that can be executed with a set
/// of variables, after validating that the script does not assign to any of
/// the supplied variables.
/// </summary>
class Script
{
    // Matches AHK assignment statements of the form "name = value" or
    // "name := value". NOTE: the previous pattern used [A-z], which also
    // matched the punctuation characters between 'Z' and 'a'
    // ('[', '\', ']', '^', '_', '`'); [A-Za-z] restricts the captured
    // variable name to letters and digits as intended.
    const string AHK_EXPRESSION = "([A-Za-z0-9]*)[:]{0,}=(.*)";

    string _Code;

    /// <summary>Display name of the script (file name without extension).</summary>
    public string Name { get; set; }

    /// <summary>Raw script source code.</summary>
    public string Code { get { return _Code; } }

    /// <summary>
    /// Loads a script from <paramref name="filename"/>, or returns null when
    /// the file does not exist.
    /// </summary>
    public static Script Load(string filename)
    {
        if (File.Exists(filename))
            return new Script(Path.GetFileNameWithoutExtension(filename), File.ReadAllText(filename));
        return null;
    }

    public Script(string name, string code)
    {
        Name = name;
        _Code = code;
    }

    /// <summary>
    /// Validates the script against the given variables and executes it.
    /// </summary>
    /// <returns>true when execution was started; false when validation failed quietly.</returns>
    public bool Execute(params Variable[] args)
    {
        if (IsScriptValid(args))
        {
            Automation.Execute(_Code, Automation.AutomationLevel.Raw, args);
            return true;
        }
        return false;
    }

    /// <summary>
    /// Throws when the script assigns to any of the supplied variables
    /// (case-insensitive name match); returns true otherwise.
    /// </summary>
    private bool IsScriptValid(Variable[] args)
    {
        var matches = Regex.Matches(_Code, AHK_EXPRESSION);
        foreach (Match match in matches)
            if (args.Any(v => v.Name.ToLower().Equals(match.Groups[1].Value.ToLower())))
                throw new Exception(string.Format("EXCEPTION: Script \"{0}\" contains an assignment to variable \"%{1}%\"", Name, match.Groups[1].Value));
        return true;
    }
}
}
| TheCharmingCthulhu/CSharp | Projects/Windows Forms/Motomatic/Motomatic/Source/Storage/Script.cs | C# | apache-2.0 | 1,512 |
// Copyright 2017 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "clif/python/instance.h"
#include "gtest/gtest.h"
namespace clif {
// Non-copyable test type whose destructor is private; instances can only be
// released through Delete(). Used below to verify that Instance<> can hold
// such a type without ever invoking the destructor itself.
class PrivateDestructor {
 public:
  PrivateDestructor() = default;
  PrivateDestructor(const PrivateDestructor& other) = delete;
  PrivateDestructor& operator=(const PrivateDestructor& other) = delete;
  // The only way to destroy an instance.
  void Delete() { delete this; }
 private:
  ~PrivateDestructor() = default;
};
// Trivial value type used as the payload for the Instance<> tests below.
class MyData {
 public:
  int a_, b_, c_;
};
// Owning raw pointer: unique ownership can be extracted into a unique_ptr
// exactly once (emptying the Instance); after the Instance is shared,
// extracting unique ownership must fail.
TEST(InstanceTest, TestCreationFromRawPointerOwn) {
  Instance<MyData> csp1(new MyData, OwnedResource());
  std::unique_ptr<MyData> up1 = MakeStdUnique(&csp1);
  EXPECT_TRUE(up1);
  EXPECT_FALSE(csp1);
  EXPECT_TRUE(csp1 == nullptr);
  Instance<MyData> csp2(up1.release(), OwnedResource());
  std::shared_ptr<MyData> sp = MakeStdShared(csp2);
  // Shared access pins ownership: MakeStdUnique must now return empty.
  std::unique_ptr<MyData> up2 = MakeStdUnique(&csp2);
  EXPECT_FALSE(up2);
  EXPECT_TRUE(csp2);
  EXPECT_TRUE(sp);
  EXPECT_TRUE(csp2 != nullptr);
}
// Unowned raw pointer: unique ownership can never be extracted, while shared
// access is still allowed and leaves the Instance usable.
TEST(InstanceTest, TestCreationFromRawPointerNotOwn) {
  std::unique_ptr<MyData> up(new MyData);
  Instance<MyData> csp1(up.get(), UnOwnedResource());
  std::unique_ptr<MyData> up1 = MakeStdUnique(&csp1);
  EXPECT_FALSE(up1);
  EXPECT_TRUE(csp1);
  std::shared_ptr<MyData> sp = MakeStdShared(csp1);
  std::unique_ptr<MyData> up2 = MakeStdUnique(&csp1);
  EXPECT_FALSE(up2);
  EXPECT_TRUE(csp1);
  EXPECT_TRUE(sp);
}
// An unowned Instance of a type with a private destructor can be Destruct()ed
// without deleting the held object; the caller stays responsible for deletion.
// NOTE(review): "Destructpr" in the test name looks like a typo for
// "Destructor"; left unchanged to keep the registered test name stable.
TEST(InstanceTest, TestCreateUnownedPrivateDestructpr) {
  PrivateDestructor* obj = new PrivateDestructor();
  Instance<PrivateDestructor> shared(obj, UnOwnedResource());
  EXPECT_FALSE(shared == nullptr);
  shared.Destruct();
  obj->Delete();
}
// Construction from unique_ptr transfers ownership into the Instance; the
// same extract-once / share-pins-ownership rules as for owned raw pointers
// apply afterwards.
TEST(InstanceTest, TestCreationFromUniquePointer) {
  std::unique_ptr<MyData> up(new MyData);
  Instance<MyData> csp1(std::move(up));
  EXPECT_FALSE(up);
  std::unique_ptr<MyData> up1 = MakeStdUnique(&csp1);
  EXPECT_TRUE(up1);
  EXPECT_FALSE(csp1);
  Instance<MyData> csp2(move(up1));
  std::shared_ptr<MyData> sp = MakeStdShared(csp2);
  std::unique_ptr<MyData> up2 = MakeStdUnique(&csp2);
  EXPECT_FALSE(up2);
  EXPECT_TRUE(csp2);
  EXPECT_TRUE(sp);
}
// A unique_ptr with an explicitly spelled default_delete converts the same
// way as a plain unique_ptr.
TEST(InstanceTest, TestCreationFromUniquePointerWithDefaultDeleter) {
  std::unique_ptr<MyData, std::default_delete<MyData>> up(new MyData);
  EXPECT_TRUE(up);
  Instance<MyData> csp3(move(up));
  EXPECT_FALSE(up);
  EXPECT_TRUE(csp3);
}
// Construction from shared_ptr shares ownership with the original pointer,
// so unique ownership can never be extracted from the Instance.
TEST(InstanceTest, TestCreationFromSharedPointer) {
  std::shared_ptr<MyData> sp1(new MyData);
  Instance<MyData> csp1(sp1);
  EXPECT_TRUE(sp1);
  EXPECT_TRUE(csp1);
  std::unique_ptr<MyData> up1 = MakeStdUnique(&csp1);
  EXPECT_FALSE(up1);
  EXPECT_TRUE(sp1);
  EXPECT_TRUE(csp1);
  std::shared_ptr<MyData> sp2 = MakeStdShared(csp1);
  std::unique_ptr<MyData> up2 = MakeStdUnique(&csp1);
  EXPECT_FALSE(up2);
  EXPECT_TRUE(csp1);
  EXPECT_TRUE(sp2);
}
} // namespace clif
| google/clif | clif/python/instance_test.cc | C++ | apache-2.0 | 3,338 |
var http=require('http');
var url = require('url')
// Performs an HTTP GET and resolves with the full response body as a string.
// Rejects on request-level errors (DNS failure, connection refused, ...) as
// well as response stream errors.
var httpget = function (url) {
    return new Promise((resolve, reject) => {
        // http.get's callback receives the response (IncomingMessage); the
        // original code misleadingly named it "req".
        var request = http.get(url, function (response) {
            var html = '';
            response.on('data', function (chunk) {
                html += chunk;
            });
            response.on('end', function () {
                resolve(html);
            });
            response.on('error', function (err) {
                reject(err);
            });
        });
        // Errors emitted on the request object itself were previously
        // unhandled and would crash the process; surface them via rejection.
        request.on('error', reject);
    });
};
// Builds request options for a Basic-auth'd POST of JSON-serialized data and
// delegates to httppost. Returns httppost's promise, which resolves with the
// response body as a UTF-8 string.
var httppostsimple = function (posturl, port, postData, username, passwd) {
    var postDatastr = JSON.stringify(postData);
    var urlObj = url.parse(posturl);
    // new Buffer(string) is deprecated and unsafe; Buffer.from is the
    // supported replacement.
    var cred = Buffer.from(username + ":" + passwd).toString('base64');
    var options = {
        hostname: urlObj.hostname,
        port: port,
        path: urlObj.pathname,
        method: 'POST',
        headers: {
            'Content-Type': 'text/plain',
            'Content-Length': Buffer.byteLength(postDatastr),
            'Authorization': `Basic ${cred}`
        }
    };
    return httppost(options, postDatastr);
};
// Sends an HTTP request described by `options` with `postData` as the body,
// resolving with the full response body decoded as UTF-8.
/* example options:
   {
     hostname: 'www.gongjuji.net',
     port: 80,
     path: '/',
     method: 'POST',
     headers: {
       'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
       'Content-Length': Buffer.byteLength(postData)
     }
   } */
var httppost = function (options, postData) {
    return new Promise((resolve, reject) => {
        var buffers = [];
        var req = http.request(options, function (res) {
            res.on('data', function (chunk) {
                buffers.push(chunk);
            });
            res.on('end', function () {
                // Concatenate all chunks before decoding so multi-byte UTF-8
                // sequences split across chunks are handled correctly.
                var wholeData = Buffer.concat(buffers);
                resolve(wholeData.toString('utf8'));
            });
            res.on('error', function (err) {
                reject(err);
            });
        });
        // Without this handler a connection failure emits 'error' on the
        // request object and crashes the process instead of rejecting.
        req.on('error', reject);
        req.write(postData);
        req.end();
    });
};
exports.httpget = httpget;
exports.httppost =httppost;
exports.httppostsimple = httppostsimple;
| 17golang/nodejsstudy | httpclient.js | JavaScript | apache-2.0 | 2,282 |
import {rnaPlot} from './rnaplot.js';
// Reusable d3 (v3 layout API) chart that lays out a hierarchy of RNA
// structures as a treemap, drawing each structure with rnaPlot inside its
// treemap cell. Follows the d3 "configurable chart function" convention.
export function rnaTreemapChart() {
    var width = 550;
    var height = 400;
    // Renders one treemap node: positions it, draws a background rect sized
    // to the node's cell and, when the node carries an RNA structure, draws
    // the structure with rnaPlot fitted into that cell.
    function rnaTreemapNode(selection) {
        // create a background rectangle for each RNA structure
        selection.each(function(d) {
            d3.select(this)
            .attr('transform', function(d) { return 'translate(' + d.x + ',' + d.y + ')' })
            .append('rect')
            .classed('structure-background-rect', true)
            .attr('width', function(d) { return Math.max(0, d.dx); })
            .attr('height', function(d) { return Math.max(0, d.dy); })
            // draw the actual RNA structure
            var chart = rnaPlot()
            .width( Math.max(0, d.dx))
            .height( Math.max(0, d.dy))
            .labelInterval(0)
            .rnaEdgePadding(10)
            .showNucleotideLabels(false);
            // only nodes carrying a 'structure' property represent actual
            // RNAs (internal hierarchy nodes do not)
            if ('structure' in d) d3.select(this).call(chart)
        });
    }
    var chart = function(selection) {
        selection.each(function(data) {
            console.log('data:', data)
            // initialize the treemap structure
            // sample input
            // { 'name': 'blah',
            // 'children: [{'structure': '..((..))',
            // 'sequence': 'ACCGGCC',
            // 'size': 50}]
            // }
            var treemap = d3.layout.treemap()
            .size([width, height])
            .sticky(false)
            .value(function(d) { return d.size; });
            // create a new <g> for each node in the treemap
            // this may be a little redundant, since we expect the calling
            // selection to contain their own g elements
            var gEnter = d3.select(this).append('g');
            var treemapGnodes = gEnter.datum(data).selectAll('.treemapNode')
            .data(treemap.nodes)
            .enter()
            .append('g')
            .attr('class', 'treemapNode')
            .call(rnaTreemapNode);
        });
    };
    // Getter/setter for the chart width (d3 convention: returns the chart
    // for chaining when used as a setter).
    chart.width = function(_) {
        if (!arguments.length) return width;
        width = _;
        return chart;
    }
    // Getter/setter for the chart height (d3 convention).
    chart.height = function(_) {
        if (!arguments.length) return height;
        height = _;
        return chart;
    }
    return chart;
}
// Stub for a grid-based variant of the treemap chart; currently only logs
// the bound selection/data and renders nothing. Not exported.
function rnaTreemapGridChart() {
    var chart = function(selection) {
        console.log('selection:', selection);
        selection.each(function(data) {
            console.log('data:', data);
        });
    }
    return chart;
}
| pkerpedjiev/fornac | app/scripts/rnatreemap.js | JavaScript | apache-2.0 | 2,511 |
package savetheenvironment.profiles.mocking;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.core.env.MapPropertySource;
import org.springframework.core.env.StandardEnvironment;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Demo entry point for Spring profiles: boots an application context with the
 * YouTube video profile active and runs a sample video search against it.
 */
public class Main {
    // Builds an annotation-driven context with PROFILE_VIDEO_YOUTUBE active.
    static AnnotationConfigApplicationContext runWithApplicationContext() {
        AnnotationConfigApplicationContext ac = new AnnotationConfigApplicationContext();
        ac.getEnvironment().setActiveProfiles(ServiceConfiguration.PROFILE_VIDEO_YOUTUBE);
        ac.register(ServiceConfiguration.class);
        ac.refresh();
        return ac;
    }
    public static void main(String[] arrrImAPirate) throws Throwable {
        ApplicationContext applicationContext = runWithApplicationContext();
        // Alternative demos, disabled by default:
        //showEnvironment(applicationContext);
        //showPropertySource(applicationContext);
        showVideos(applicationContext);
    }
    // Demonstrates registering a MapPropertySource with highest precedence
    // and resolving values through the Environment abstraction (including a
    // value contributed by an @PropertySource elsewhere).
    private static void showPropertySource(ApplicationContext applicationContext) {
        System.out.println();
        System.out.println("************ Property Source ***********");
        Map<String, Object> map = new HashMap<String, Object>();
        map.put("db.username", "scott");
        map.put("db.password", "tiger");
        MapPropertySource mapPropertySource = new MapPropertySource("dbConfig", map);
        ((StandardEnvironment) applicationContext.getEnvironment()).getPropertySources().addFirst(mapPropertySource);
        System.out.println("DB Username: " + applicationContext.getEnvironment().getProperty("db.username"));
        System.out.println("DB Password: " + applicationContext.getEnvironment().getProperty("db.password"));
        System.out.println();
        System.out.println("DB Url from @PropertySource: " + applicationContext.getEnvironment().getProperty("db.url"));
        System.out.println();
    }
    // Runs a search through the profile-selected VideoSearch bean and prints
    // the resulting titles.
    private static void showVideos(ApplicationContext applicationContext) throws Exception {
        VideoSearch videoSearch = applicationContext.getBean(VideoSearch.class);
        List<String> videoTitles = videoSearch.lookupVideo("Kevin Nilson");
        System.out.println();
        System.out.println("************** VIDEO SEARCH RESULTS - YOUTUBE ************** ");
        for (String title : videoTitles) {
            System.out.println(title);
        }
    }
    // Prints a sample system property resolved through the Environment.
    private static void showEnvironment(ApplicationContext applicationContext) {
        System.out.println();
        System.out.println("************ Environment ***********");
        System.out.println("User Dir: " + applicationContext.getEnvironment().getProperty("user.dir"));
        System.out.println();
    }
}
package rmi;
import java.rmi.RemoteException;
import java.rmi.registry.LocateRegistry;
import java.rmi.registry.Registry;
import java.rmi.server.UnicastRemoteObject;
/**
 * RMI demo server: exports this object as a remote stub and registers it in
 * the local RMI registry under the name "myRemoteInterface".
 */
public class MyRemoteClass implements MyRemoteInterface {
    // Remote method: echoes the caller-supplied name alongside a fixed token.
    public String[] sayYourName(String name) throws RemoteException {
        System.err.println("remote reference");
        return new String[] { "kick", name };
    }
    public static void main(String[] args) {
        try {
            MyRemoteClass myRemoteClass = new MyRemoteClass();
            // Port 0 lets RMI pick an anonymous listening port for the stub.
            MyRemoteInterface myRemoteInterface = (MyRemoteInterface) UnicastRemoteObject
                    .exportObject(myRemoteClass, 0);
            Registry registry = LocateRegistry.getRegistry();
            registry.rebind("myRemoteInterface", myRemoteInterface);
            System.err.println("system ready!!");
        } catch (RemoteException e) {
            e.printStackTrace();
        }
    }
    // Trivial liveness probe for remote callers.
    public Boolean checkIfSuccess() throws RemoteException {
        return true;
    }
}
| myrosicky/projects | weibo/src/main/java/rmi/MyRemoteClass.java | Java | apache-2.0 | 932 |
/*
* Copyright 2003-2005 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.wsrp4j.persistence.xml;
import org.apache.wsrp4j.commons.persistence.PersistentInformation;
/**
 * This class defines the interface for persistent information needed
 * to store and retrieve PersistentDataObjects with castor XML support.
 *
 * @version $Id: PersistentInformationXML.java 374672 2006-02-03 14:10:58Z cziegeler $
 */
public interface PersistentInformationXML extends PersistentInformation {
    /**
     * Sets the store directory for the persistent XML files.
     *
     * @param storeDirectory String name of the store directory
     */
    void setStoreDirectory(String storeDirectory);
    /**
     * Returns the directory for the persistent XML files.
     *
     * @return String name of the store directory
     */
    String getStoreDirectory();
    /**
     * Sets the Castor XML mapping file name, fully qualified.
     *
     * @param mappingFileName String fully qualified filename
     */
    void setMappingFileName(String mappingFileName);
    /**
     * Returns the XML mapping file name, fully qualified.
     *
     * @return String fully qualified filename
     */
    String getMappingFileName();
    /**
     * Sets the file name stub for persistent XML files. The stub contains the
     * store directory followed by a file separator and the class name of the
     * object to be restored.
     *
     * @param stub String file name stub
     */
    void setFilenameStub(String stub);
    /**
     * Returns the file name stub for persistent XML files.
     *
     * @see #setFilenameStub
     * @return String file name stub
     */
    String getFilenameStub();
    /**
     * Returns the fully qualified file name of a persistent XML file.
     *
     * @return String file name
     */
    String getFilename();
    /**
     * Sets the fully qualified file name for a persistent XML file.
     *
     * @param filename String file name
     */
    void setFilename(String filename);
    /**
     * Updates the file name, enhanced by a string token (like a handle), to
     * identify a unique persistent XML file. If a groupID is set, the
     * groupID is used instead of the token to build the filename.
     *
     * @param token String token, like a handle
     */
    void updateFileName(String token);
    /**
     * Returns the file extension used for persistent XML files.
     */
    String getExtension();
    /**
     * Sets the file extension for persistent XML files.
     *
     * @param extension String file extension
     */
    void setExtension(String extension);
    /**
     * Returns the separator to be used in a fully qualified file name.
     *
     * @return String separator character
     */
    String getSeparator();
    /**
     * Sets the separator character (e.g. '@').
     *
     * @param separator String separator character
     */
    void setSeparator(String separator);
    /**
     * @return this object as String
     */
    String toString();
}
| axeolotl/wsrp4cxf | persistence-xml/src/java/org/apache/wsrp4j/persistence/xml/PersistentInformationXML.java | Java | apache-2.0 | 3,611 |
/*
* Licensed to The Apereo Foundation under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* The Apereo Foundation licenses this file to you under the Apache License,
* Version 2.0, (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.tle.web.copyright;
import com.tle.beans.item.Item;
import com.tle.beans.item.ItemId;
import com.tle.beans.item.ItemKey;
import com.tle.beans.item.attachments.Attachment;
import com.tle.beans.item.attachments.IAttachment;
import com.tle.core.activation.ActivationConstants;
import com.tle.core.copyright.Holding;
import com.tle.core.copyright.Portion;
import com.tle.core.copyright.Section;
import com.tle.core.copyright.service.AgreementStatus;
import com.tle.core.copyright.service.CopyrightService;
import com.tle.core.security.TLEAclManager;
import com.tle.web.viewitem.FilestoreContentFilter;
import com.tle.web.viewitem.FilestoreContentStream;
import com.tle.web.viewurl.ViewAttachmentUrl;
import com.tle.web.viewurl.ViewItemUrl;
import com.tle.web.viewurl.ViewItemUrlFactory;
import java.io.IOException;
import javax.inject.Inject;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
 * Filestore content filter that enforces copyright (activation) rules when an
 * attachment belonging to a copyrighted item is requested: blocks inactive
 * portions for users lacking the dedicated privilege, and redirects to the
 * item view when the user still needs to accept an agreement.
 */
public abstract class AbstractCopyrightFilestoreFilter<
    H extends Holding, P extends Portion, S extends Section>
    implements FilestoreContentFilter {
  private static final Log LOGGER = LogFactory.getLog(AbstractCopyrightFilestoreFilter.class);
  @Inject private ViewItemUrlFactory urlFactory;
  @Inject private TLEAclManager aclService;

  /**
   * Filters a filestore stream. Returns the stream unchanged when no
   * copyright handling applies, returns null after issuing a redirect when an
   * agreement must be accepted first, and throws when access is forbidden.
   */
  @Override
  public FilestoreContentStream filter(
      FilestoreContentStream contentStream,
      HttpServletRequest request,
      HttpServletResponse response)
      throws IOException {
    String filepath = contentStream.getFilepath();
    ItemKey itemKey = contentStream.getItemId();
    CopyrightService<H, P, S> copyrightService = getCopyrightService();
    ItemId itemId = ItemId.fromKey(itemKey);
    Item item = copyrightService.getCopyrightedItem(itemId);
    if (item != null) {
      Attachment attachment = copyrightService.getSectionAttachmentForFilepath(item, filepath);
      if (attachment == null) {
        // Not a copyright-managed attachment: pass the stream through.
        return contentStream;
      }
      AgreementStatus status;
      try {
        status = copyrightService.getAgreementStatus(item, attachment);
      } catch (IllegalStateException bad) {
        LOGGER.error("Error getting AgreementStatus", bad); // $NON-NLS-1$
        return contentStream;
      }
      // Inactive portions are only viewable with the dedicated privilege.
      if (status.isInactive()
          && aclService
              .filterNonGrantedPrivileges(ActivationConstants.VIEW_INACTIVE_PORTIONS)
              .isEmpty()) {
        throw copyrightService.createViolation(item);
      }
      if (status.isNeedsAgreement()) {
        // FIXME: This creates /items/ urls, what if they came from
        // /integ/ ?
        ViewItemUrl vurl = urlFactory.createFullItemUrl(itemKey);
        vurl.add(new ViewAttachmentUrl(attachment.getUuid()));
        response.sendRedirect(vurl.getHref());
        // null signals that the response has already been handled.
        return null;
      }
    }
    return contentStream;
  }

  /**
   * Returns whether the attachment may be viewed: false when the agreement
   * status cannot be determined or an agreement still needs to be accepted.
   */
  @Override
  public boolean canView(Item i, IAttachment attach) {
    CopyrightService<H, P, S> copyrightService = getCopyrightService();
    Item item = copyrightService.getCopyrightedItem(i.getItemId());
    if (item != null) {
      AgreementStatus status;
      try {
        status = copyrightService.getAgreementStatus(item, attach);
      } catch (IllegalStateException bad) {
        return false;
      }
      if (status.isNeedsAgreement()) {
        return false;
      }
    }
    return true;
  }

  /** Supplies the concrete copyright service implementation. */
  protected abstract CopyrightService<H, P, S> getCopyrightService();
}
| equella/Equella | Source/Plugins/Core/com.equella.core/src/com/tle/web/copyright/AbstractCopyrightFilestoreFilter.java | Java | apache-2.0 | 4,308 |
# Copyright (C) 2014 Universidad Politecnica de Madrid
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
from keystone import exception
from keystone.auth import plugins as auth_plugins
from keystone.common import dependency
from keystone.openstack.common import log
from oauthlib.oauth2 import RequestValidator
try: from oslo.utils import timeutils
except ImportError: from keystone.openstack.common import timeutils
METHOD_NAME = 'oauth2_validator'
LOG = log.getLogger(__name__)
@dependency.requires('oauth2_api')
class OAuth2Validator(RequestValidator):
"""OAuthlib request validator."""
# Ordered roughly in order of appearance in the authorization grant flow
# Pre- and post-authorization.
def validate_client_id(self, client_id, request, *args, **kwargs):
    """Simple validity check: does the client exist?

    NOTE: the SQL driver raises Keystone's NotFound (404) when the consumer
    does not exist, so callers see that instead of oauthlib's InvalidClientId
    (400 Bad Request).
    """
    client_dict = self.oauth2_api.get_consumer(client_id)
    return bool(client_dict)
def validate_redirect_uri(self, client_id, redirect_uri, request, *args, **kwargs):
    """Allow only redirect URIs the client pre-registered (exact match)."""
    registered_uris = self.oauth2_api.get_consumer(client_id)['redirect_uris']
    return redirect_uri in registered_uris
def get_default_redirect_uri(self, client_id, request, *args, **kwargs):
    """Return the redirect URI used when none was supplied.

    Not implemented: clients are expected to pre-register a redirect URI
    rather than supply one on each authorization request.
    """
    # TODO(garcianavalon) implement
    pass
def validate_scopes(self, client_id, scopes, client, request, *args, **kwargs):
    """Is the client allowed to access all of the requested scopes?"""
    if not scopes:
        # No scopes requested, nothing to check.
        return True
    allowed_scopes = self.oauth2_api.get_consumer(client_id)['scopes']
    if not allowed_scopes:
        # The client is not allowed any scopes at all.
        return False
    return all(scope in allowed_scopes for scope in scopes)
def get_default_scopes(self, client_id, request, *args, **kwargs):
    """Return the scopes authorized when none are supplied in the
    authorization request. Not implemented."""
    # TODO(garcianavalon) implement
    pass
def validate_response_type(self, client_id, response_type, client, request, *args, **kwargs):
    """Accept any response type for now.

    Clients should only be allowed the response type matching their one
    allowed grant type, but multiple grant types per consumer are currently
    supported, so this check is deliberately disabled.
    """
    # FIXME(garcianavalon) we need to support multiple grant types
    # for the same consumers right now. In the future we should
    # separate them and only allow one grant type (registering
    # each client one time for each grant or allowing components)
    # or update the tools to allow to create clients with
    # multiple grants
    # client_dict = self.oauth2_api.get_consumer(client_id)
    # allowed_response_type = client_dict['response_type']
    # return allowed_response_type == response_type
    return True
# Post-authorization
def save_authorization_code(self, client_id, code, request, *args, **kwargs):
    """Persist the authorization code with its grant context.

    Associates the code with request.scopes, request.redirect_uri,
    request.state and the authorizing user (supplied via the
    post-authorization credentials, i.e. {'user': request.user}).
    """
    authorization_code = {
        'code': code['code'],  # code is a dict with state and the code
        'consumer_id': client_id,
        'scopes': request.scopes,
        'authorizing_user_id': request.user_id,  # populated through the credentials
        'state': request.state,
        'redirect_uri': request.redirect_uri
    }
    token_duration = 28800  # TODO(garcianavalon) extract as configuration option
    # TODO(garcianavalon) find a better place to do this
    now = timeutils.utcnow()
    future = now + datetime.timedelta(seconds=token_duration)
    expiry_date = timeutils.isotime(future, subsecond=True)
    authorization_code['expires_at'] = expiry_date
    self.oauth2_api.store_authorization_code(authorization_code)
# Token request
def authenticate_client(self, request, *args, **kwargs):
    """Authenticate the client via HTTP Basic credentials.

    On success, attaches a minimal client object (with client_id) to the
    request and returns True.

    NOTE(review): relies on Python 2 byte-string semantics
    (str.decode('unicode_escape') / .decode('base64')); would need rework
    for Python 3 -- confirm the target runtime.
    """
    # TODO(garcianavalon) write it cleaner
    LOG.debug('OAUTH2: authenticating client')
    authmethod, auth = request.headers['Authorization'].split(' ', 1)
    auth = auth.decode('unicode_escape')
    if authmethod.lower() == 'basic':
        auth = auth.decode('base64')
        client_id, secret = auth.split(':', 1)
        client_dict = self.oauth2_api.get_consumer_with_secret(client_id)
        if client_dict['secret'] == secret:
            # TODO(garcianavalon) this can be done in a cleaner way
            # if we change the consumer model attribute to client_id
            request.client = type('obj', (object,),
                                  {'client_id': client_id})
            LOG.info('OAUTH2: succesfully authenticated client %s',
                     client_dict['name'])
            return True
    return False
def authenticate_client_id(self, client_id, request, *args, **kwargs):
    """Public (non-authenticated) clients are not allowed."""
    # TODO(garcianavalon) check this method
    return False
def validate_code(self, client_id, code, client, request, *args, **kwargs):
    """Check that ``code`` is valid and belongs to the requesting client.

    On success the scopes, state and authorizing user stored with the code
    are copied onto ``request`` for use by the token-issuing machinery.
    """
    stored = self.oauth2_api.get_authorization_code(code)
    if not stored['valid']:
        return False
    if stored['consumer_id'] != request.client.client_id:
        return False
    request.scopes = stored['scopes']
    request.state = stored['state']
    request.user = stored['authorizing_user_id']
    return True
def confirm_redirect_uri(self, client_id, code, redirect_uri, client, *args, **kwargs):
    """Verify ``redirect_uri`` matches the one saved with the grant code."""
    stored_uri = self.oauth2_api.get_authorization_code(code)['redirect_uri']
    return stored_uri == redirect_uri
def validate_grant_type(self, client_id, grant_type, client, request, *args, **kwargs):
    """Check whether ``grant_type`` is one of the supported grants.

    Clients should only be allowed to use one type of grant.
    FIXME(garcianavalon) we need to support multiple grant types for the
    same consumers right now.  In the future we should separate them and
    only allow one grant type (registering each client once per grant or
    allowing components) or update the tools to allow creating clients
    with multiple grants.
    """
    # TODO(garcianavalon) sync with SQL backend soported grant_types
    supported_grant_types = (
        'password',
        'authorization_code',
        'client_credentials',
        'refresh_token',
    )
    return grant_type in supported_grant_types
def save_bearer_token(self, token, request, *args, **kwargs):
    """Persist the issued Bearer token (access + refresh) for later lookup.

    ``token`` is the oauthlib token dict, for example::

        {
            u'access_token': u'iC1DQuu7zOgNIjquPXPmXE5hKnTwgu',
            u'expires_in': 3600,
            u'token_type': u'Bearer',
            u'state': u'yKxWeujbz9VUBncQNrkWvVcx8EXl1w',
            u'scope': u'basic_scope',
            u'refresh_token': u'02DTsL6oWgAibU7xenvXttwG80trJC',
        }

    The consumer and user are taken from ``request.client``/``request.user``
    when present (set while validating the authorization code) and fall
    back to ``request.client_id``/``request.user_id`` otherwise.
    """
    # TODO(garcinanavalon) create a custom TokenCreator instead of
    # hacking the dictionary
    client = getattr(request, 'client', None)
    consumer_id = client.client_id if client else request.client_id
    user_id = getattr(request, 'user', None) or request.user_id
    # NOTE(review): expiry uses the local clock (datetime.today()), which is
    # what validate_bearer_token compares against — confirm UTC is not needed.
    expires_at = (datetime.datetime.today()
                  + datetime.timedelta(seconds=token['expires_in']))
    self.oauth2_api.store_access_token({
        'id': token['access_token'],
        'consumer_id': consumer_id,
        'authorizing_user_id': user_id,
        'scopes': request.scopes,
        'expires_at': expires_at.strftime('%Y-%m-%d %H:%M:%S'),
        'refresh_token': token.get('refresh_token', None),
    })
def invalidate_authorization_code(self, client_id, code, request, *args, **kwargs):
    """Mark ``code`` as used.

    Authorization codes are single-use: once a Bearer token has been
    acquired with a code, the code must be invalidated.
    """
    self.oauth2_api.invalidate_authorization_code(code)
# Protected resource request
def validate_bearer_token(self, token, scopes, request):
    """Validate an access token presented on a protected-resource request.

    Checks that the token exists, has not expired, and was granted exactly
    the requested ``scopes``.  On success the token's user and client are
    exposed on ``request``, mirroring the oauthlib resource-endpoint
    example (``request.scopes`` is already set by oauthlib):
    https://oauthlib.readthedocs.org/en/latest/oauth2/endpoints/resource.html
    """
    try:
        access_token = self.oauth2_api.get_access_token(token)
    except exception.NotFound:
        return False
    expiration = datetime.datetime.strptime(
        access_token['expires_at'], '%Y-%m-%d %H:%M:%S')
    if expiration < datetime.datetime.today():
        return False
    if access_token['scopes'] != scopes:
        return False
    request.user = access_token['authorizing_user_id']
    request.client = access_token['consumer_id']
    return True
# Token refresh request
def get_original_scopes(self, refresh_token, request, *args, **kwargs):
    """Return the scopes originally granted for ``refresh_token``.

    These are carried over to the refreshed access token when the client
    does not specify a scope during the refresh request.
    """
    # TODO(garcianavalon) look the scopes up instead of hard-coding them
    return ['all_info']
def is_within_original_scope(self, request_scopes, refresh_token, request, *args, **kwargs):
    """Check if requested scopes are within the scope of the refresh token.

    When access tokens are refreshed, the scope of the new token needs to
    be within the scope of the original one.  oauthlib first checks the
    requested scopes against get_original_scopes(); only if that check
    fails is this method consulted, for cases where enumerating all valid
    scopes is not practical.

    :param request_scopes: A list of scopes that were requested by client
    :param refresh_token: Unicode refresh_token
    :param request: The HTTP Request (oauthlib.common.Request)
    :rtype: True or False

    Method is used by:
        - Refresh token grant
    """
    # TODO(garcianavalon) actually restrict the scopes
    return True
def validate_refresh_token(self, refresh_token, client, request, *args, **kwargs):
    """Ensure the refresh token exists and has not expired.

    OBS! Sets ``request.user`` to the resource owner associated with this
    refresh token, as oauthlib expects.

    :param refresh_token: Unicode refresh token
    :param client: Client object set by you, see authenticate_client.
    :param request: The HTTP Request (oauthlib.common.Request)
    :rtype: True or False

    Method is used by:
        - Authorization Code Grant (indirectly by issuing refresh tokens)
        - Resource Owner Password Credentials Grant (also indirectly)
        - Refresh Token Grant
    """
    # TODO(garcianavalon) extract both durations as configuration options
    access_token_duration = 28800   # seconds
    refresh_token_duration = 14     # days
    try:
        access_token = self.oauth2_api.get_access_token_by_refresh_token(
            refresh_token)
    except exception.NotFound:
        return False
    # The refresh token expires refresh_token_duration days after the
    # access token was issued; the issue time is recovered by subtracting
    # the access-token duration from the stored expiry.
    # TODO(garcianavalon) find a better place to do this
    issued_at = (datetime.datetime.strptime(access_token['expires_at'],
                                            '%Y-%m-%d %H:%M:%S')
                 - datetime.timedelta(seconds=access_token_duration))
    refresh_expires_at = issued_at + datetime.timedelta(
        days=refresh_token_duration)
    if refresh_expires_at < datetime.datetime.today():
        return False
    request.user = access_token['authorizing_user_id']
    return True
# Support for password grant
def validate_user(self, username, password, client, request,
                  *args, **kwargs):
    """Ensure the username and password are valid.

    OBS! On success ``request.user`` is set to the authenticated user id so
    a token can later be associated with a user in the persistence method
    (commonly, save_bearer_token).

    :param username: Unicode username
    :param password: Unicode password
    :param client: Client object set by you, see authenticate_client.
    :param request: The HTTP Request (oauthlib.common.Request)
    :rtype: True or False

    Method is used by:
        - Resource Owner Password Credentials Grant
    """
    # Validate the credentials by running the regular password auth plugin
    # against the default domain.
    auth_payload = {
        'user': {
            "domain": {
                "id": "default"
            },
            "name": username,
            "password": password
        }
    }
    auth_context = {}
    try:
        auth_plugins.password.Password().authenticate(
            context={},
            auth_payload=auth_payload,
            auth_context=auth_context)
        request.user = auth_context['user_id']
        return True
    except Exception:
        # Any authentication failure means the credentials are invalid.
        return False
| ging/keystone | keystone/contrib/oauth2/validator.py | Python | apache-2.0 | 16,189 |
/****Developed by Chanisco Tromp*****/
using UnityEngine;
using System.Collections;
/// <summary>
/// Moves an arrow along its local X axis every frame, switching to a
/// faster speed once ArrowSpawn.i reaches 40.
/// </summary>
public class Arrow : MonoBehaviour {

    // Per-frame displacement along local X (already scaled by deltaTime).
    float speed;

    // Update is called once per frame.
    void Update () {
        float unitsPerSecond = (ArrowSpawn.i < 40) ? 5f : 7f;
        speed = unitsPerSecond * Time.deltaTime;
        transform.Translate(speed, 0, 0);
    }
}
| Chanisco/GameJam_WarmingUp | Warmyparmy/Assets/Scripts/Arrow.cs | C# | apache-2.0 | 333 |
/*
* Copyright 2006-2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.consol.citrus.validation;
import com.consol.citrus.context.TestContext;
import com.consol.citrus.exceptions.ValidationException;
import com.consol.citrus.message.Message;
import com.consol.citrus.validation.context.ValidationContext;
import org.springframework.util.StringUtils;
/**
 * Message validator that asserts both received and control message carry an
 * empty payload. A non-empty control message indicates a broken validation
 * setup and raises an exception; a non-empty received payload fails the
 * validation.
 *
 * @author Christoph Deppisch
 */
public class DefaultEmptyMessageValidator extends DefaultMessageValidator {

    @Override
    public void validateMessage(Message receivedMessage, Message controlMessage,
                                TestContext context, ValidationContext validationContext) {
        if (controlMessage == null || controlMessage.getPayload() == null) {
            log.debug("Skip message payload validation as no control message was defined");
            return;
        }

        if (hasTextPayload(controlMessage)) {
            throw new ValidationException("Empty message validation failed - control message is not empty!");
        }

        log.debug("Start to verify empty message payload ...");
        if (log.isDebugEnabled()) {
            log.debug("Received message:\n" + receivedMessage);
            log.debug("Control message:\n" + controlMessage);
        }

        if (hasTextPayload(receivedMessage)) {
            throw new ValidationException("Validation failed - received message content is not empty!");
        }

        log.info("Message payload is empty as expected: All values OK");
    }

    /** Whether the message payload contains any non-whitespace text. */
    private boolean hasTextPayload(Message message) {
        return StringUtils.hasText(message.getPayload(String.class));
    }
}
| christophd/citrus | core/citrus-api/src/main/java/com/consol/citrus/validation/DefaultEmptyMessageValidator.java | Java | apache-2.0 | 2,319 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.core.datamap;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.carbondata.common.annotations.InterfaceAudience;
import org.apache.carbondata.common.exceptions.MetadataProcessException;
import org.apache.carbondata.common.exceptions.sql.MalformedDataMapCommandException;
import org.apache.carbondata.common.exceptions.sql.NoSuchDataMapException;
import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datamap.dev.DataMapFactory;
import org.apache.carbondata.core.indexstore.BlockletDetailsFetcher;
import org.apache.carbondata.core.indexstore.SegmentPropertiesFetcher;
import org.apache.carbondata.core.indexstore.blockletindex.BlockletDataMapFactory;
import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier;
import org.apache.carbondata.core.metadata.CarbonMetadata;
import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
import org.apache.carbondata.core.metadata.schema.table.DataMapSchema;
import org.apache.carbondata.core.metadata.schema.table.DataMapSchemaStorageProvider;
import org.apache.carbondata.core.metadata.schema.table.DiskBasedDMSchemaStorageProvider;
import org.apache.carbondata.core.metadata.schema.table.RelationIdentifier;
import org.apache.carbondata.core.mutate.SegmentUpdateDetails;
import org.apache.carbondata.core.mutate.UpdateVO;
import org.apache.carbondata.core.statusmanager.SegmentRefreshInfo;
import org.apache.carbondata.core.statusmanager.SegmentUpdateStatusManager;
import org.apache.carbondata.core.util.CarbonProperties;
import org.apache.carbondata.core.util.CarbonSessionInfo;
import org.apache.carbondata.core.util.ThreadLocalSessionInfo;
import static org.apache.carbondata.core.metadata.schema.datamap.DataMapClassProvider.MV;
import static org.apache.carbondata.core.metadata.schema.datamap.DataMapClassProvider.PREAGGREGATE;
/**
 * Singleton registry that maintains all the DataMaps (indexes) known to this
 * process, keyed by table. It also caches per-provider datamap catalogs and
 * per-table segment refresh bookkeeping used by update/delete flows.
 */
@InterfaceAudience.Internal
public final class DataMapStoreManager {

  private static DataMapStoreManager instance = new DataMapStoreManager();

  // NOTE(review): returns the live internal map (not a copy); callers can
  // mutate registry state through it — confirm that is intended.
  public Map<String, List<TableDataMap>> getAllDataMaps() {
    return allDataMaps;
  }

  /**
   * Contains the list of datamaps for each table, keyed by the table unique
   * name (dbName_tableName).
   */
  private Map<String, List<TableDataMap>> allDataMaps = new ConcurrentHashMap<>();

  /**
   * Contains the datamap catalog for each datamap provider.
   * Lazily initialized by initializeDataMapCatalogs(); null until first use.
   */
  private Map<String, DataMapCatalog> dataMapCatalogs = null;

  // Per-table segment refresh trackers, keyed by AbsoluteTableIdentifier
  // unique name. Populated lazily via getTableSegmentRefresher().
  private Map<String, TableSegmentRefresher> segmentRefreshMap = new ConcurrentHashMap<>();

  // Disk-backed store for datamap schemas, rooted at the configured carbon
  // system folder.
  private DataMapSchemaStorageProvider provider = new DiskBasedDMSchemaStorageProvider(
      CarbonProperties.getInstance().getSystemFolderLocation());

  private static final LogService LOGGER =
      LogServiceFactory.getLogService(DataMapStoreManager.class.getName());

  // Private: use getInstance().
  private DataMapStoreManager() {

  }

  /**
   * It only gives the visible datamaps: datamaps switched off through the
   * session property carbon.datamap.visible.<db>.<table>.<dm> are filtered
   * out (visibility defaults to true).
   */
  List<TableDataMap> getAllVisibleDataMap(CarbonTable carbonTable) throws IOException {
    CarbonSessionInfo sessionInfo = ThreadLocalSessionInfo.getCarbonSessionInfo();
    List<TableDataMap> allDataMaps = getAllDataMap(carbonTable);
    Iterator<TableDataMap> dataMapIterator = allDataMaps.iterator();
    while (dataMapIterator.hasNext()) {
      TableDataMap dataMap = dataMapIterator.next();
      String dbName = carbonTable.getDatabaseName();
      String tableName = carbonTable.getTableName();
      String dmName = dataMap.getDataMapSchema().getDataMapName();
      // TODO: need support get the visible status of datamap without sessionInfo in the future
      if (sessionInfo != null) {
        boolean isDmVisible = sessionInfo.getSessionParams().getProperty(
            String.format("%s%s.%s.%s", CarbonCommonConstants.CARBON_DATAMAP_VISIBLE,
                dbName, tableName, dmName), "true").trim().equalsIgnoreCase("true");
        if (!isDmVisible) {
          LOGGER.warn(String.format("Ignore invisible datamap %s on table %s.%s",
              dmName, dbName, tableName));
          dataMapIterator.remove();
        }
      } else {
        String message = "Carbon session info is null";
        LOGGER.info(message);
      }
    }
    return allDataMaps;
  }

  /**
   * It gives all datamaps except the default datamap: only index datamaps
   * whose first parent table matches the given table are returned.
   *
   * @return datamaps registered on the given table
   */
  public List<TableDataMap> getAllDataMap(CarbonTable carbonTable) throws IOException {
    List<DataMapSchema> dataMapSchemas = getDataMapSchemasOfTable(carbonTable);
    List<TableDataMap> dataMaps = new ArrayList<>();
    if (dataMapSchemas != null) {
      for (DataMapSchema dataMapSchema : dataMapSchemas) {
        RelationIdentifier identifier = dataMapSchema.getParentTables().get(0);
        if (dataMapSchema.isIndexDataMap() && identifier.getTableId()
            .equals(carbonTable.getTableId())) {
          dataMaps.add(getDataMap(carbonTable, dataMapSchema));
        }
      }
    }
    return dataMaps;
  }

  /**
   * It gives all datamap schemas of a given table, read from the schema
   * storage provider.
   */
  public List<DataMapSchema> getDataMapSchemasOfTable(CarbonTable carbonTable) throws IOException {
    return provider.retrieveSchemas(carbonTable);
  }

  /**
   * It gives all datamap schemas from store.
   */
  public List<DataMapSchema> getAllDataMapSchemas() throws IOException {
    return provider.retrieveAllSchemas();
  }

  /** Retrieves a single datamap schema by name from the schema store. */
  public DataMapSchema getDataMapSchema(String dataMapName)
      throws NoSuchDataMapException, IOException {
    return provider.retrieveSchema(dataMapName);
  }

  /**
   * Saves the datamap schema to storage
   * @param dataMapSchema schema to persist
   */
  public void saveDataMapSchema(DataMapSchema dataMapSchema) throws IOException {
    provider.saveSchema(dataMapSchema);
  }

  /**
   * Drops the datamap schema from storage
   * @param dataMapName name of the schema to remove
   */
  public void dropDataMapSchema(String dataMapName) throws IOException {
    provider.dropSchema(dataMapName);
  }

  /**
   * Update the datamap schema after table rename
   * This should be invoked after changing table name.
   * Preaggregate schemas are left untouched; MV schemas keep their original
   * relation identifier but get updated parent tables.
   * @param dataMapSchemaList schemas belonging to the renamed table
   * @param newTableName the table's new name
   */
  public void updateDataMapSchema(List<DataMapSchema> dataMapSchemaList,
      String newTableName) throws IOException {
    List<DataMapSchema> newDataMapSchemas = new ArrayList<>();
    for (DataMapSchema dataMapSchema : dataMapSchemaList) {
      RelationIdentifier relationIdentifier = dataMapSchema.getRelationIdentifier();
      String dataBaseName = relationIdentifier.getDatabaseName();
      String tableId = relationIdentifier.getTableId();
      String providerName = dataMapSchema.getProviderName();
      // if the preaggregate datamap,not be modified the schema
      if (providerName.equalsIgnoreCase(PREAGGREGATE.toString())) {
        continue;
      }
      // if the mv datamap,not be modified the relationIdentifier
      if (!providerName.equalsIgnoreCase(MV.toString())) {
        RelationIdentifier newRelationIdentifier = new RelationIdentifier(dataBaseName,
            newTableName, tableId);
        dataMapSchema.setRelationIdentifier(newRelationIdentifier);
      }
      List<RelationIdentifier> newParentTables = new ArrayList<>();
      List<RelationIdentifier> parentTables = dataMapSchema.getParentTables();
      for (RelationIdentifier identifier : parentTables) {
        RelationIdentifier newParentTableIdentifier = new RelationIdentifier(
            identifier.getDatabaseName(), newTableName, identifier.getTableId());
        newParentTables.add(newParentTableIdentifier);
      }
      dataMapSchema.setParentTables(newParentTables);
      newDataMapSchemas.add(dataMapSchema);
      // frist drop old schema
      String dataMapName = dataMapSchema.getDataMapName();
      dropDataMapSchema(dataMapName);
    }
    // save new datamap schema to storage
    for (DataMapSchema newDataMapSchema : newDataMapSchemas) {
      saveDataMapSchema(newDataMapSchema);
    }
  }

  /**
   * Register datamap catalog for the datamap provider, creating the
   * provider's catalog on first use.
   * @param dataMapProvider provider used to create the catalog if absent
   * @param dataMapSchema schema to register in the catalog
   */
  public synchronized void registerDataMapCatalog(DataMapProvider dataMapProvider,
      DataMapSchema dataMapSchema) throws IOException {
    initializeDataMapCatalogs(dataMapProvider);
    String name = dataMapSchema.getProviderName();
    DataMapCatalog dataMapCatalog = dataMapCatalogs.get(name);
    if (dataMapCatalog == null) {
      dataMapCatalog = dataMapProvider.createDataMapCatalog();
      if (dataMapCatalog != null) {
        dataMapCatalogs.put(name, dataMapCatalog);
        dataMapCatalog.registerSchema(dataMapSchema);
      }
    } else {
      dataMapCatalog.registerSchema(dataMapSchema);
    }
  }

  /**
   * Unregister datamap catalog. No-op if catalogs were never initialized or
   * the provider has no catalog.
   * @param dataMapSchema schema whose entry should be removed
   */
  public synchronized void unRegisterDataMapCatalog(DataMapSchema dataMapSchema) {
    if (dataMapCatalogs == null) {
      return;
    }
    String name = dataMapSchema.getProviderName();
    DataMapCatalog dataMapCatalog = dataMapCatalogs.get(name);
    if (dataMapCatalog != null) {
      dataMapCatalog.unregisterSchema(dataMapSchema.getDataMapName());
    }
  }

  /**
   * Get the datamap catalog for provider.
   * @param providerName provider whose catalog is requested
   * @return the catalog, or null if none is registered for the provider
   */
  public synchronized DataMapCatalog getDataMapCatalog(DataMapProvider dataMapProvider,
      String providerName) throws IOException {
    initializeDataMapCatalogs(dataMapProvider);
    return dataMapCatalogs.get(providerName);
  }

  /**
   * Initialize by reading all datamaps from store and re register it.
   * Runs only once (guarded by dataMapCatalogs == null); callers are
   * synchronized, so the lazy init is thread-safe.
   * @param dataMapProvider provider used to create catalogs
   */
  private void initializeDataMapCatalogs(DataMapProvider dataMapProvider) throws IOException {
    if (dataMapCatalogs == null) {
      dataMapCatalogs = new ConcurrentHashMap<>();
      List<DataMapSchema> dataMapSchemas = getAllDataMapSchemas();
      for (DataMapSchema schema : dataMapSchemas) {
        DataMapCatalog dataMapCatalog = dataMapCatalogs.get(schema.getProviderName());
        if (dataMapCatalog == null) {
          dataMapCatalog = dataMapProvider.createDataMapCatalog();
          if (null == dataMapCatalog) {
            throw new RuntimeException("Internal Error.");
          }
          dataMapCatalogs.put(schema.getProviderName(), dataMapCatalog);
        }
        try {
          dataMapCatalog.registerSchema(schema);
        } catch (Exception e) {
          // Ignore the schema
          LOGGER.error(e, "Error while registering schema");
        }
      }
    }
  }

  /**
   * It gives the default datamap of the table. Default datamap of any table is BlockletDataMap
   *
   * @param table table whose default datamap is needed
   * @return the blocklet-level default datamap
   */
  public TableDataMap getDefaultDataMap(CarbonTable table) {
    return getDataMap(table, BlockletDataMapFactory.DATA_MAP_SCHEMA);
  }

  /**
   * Get the datamap for reading data. Uses double-checked locking on the
   * interned table unique name: the datamap is looked up, and if absent it
   * is created and registered under the per-table lock.
   */
  public TableDataMap getDataMap(CarbonTable table, DataMapSchema dataMapSchema) {
    String tableUniqueName =
        table.getAbsoluteTableIdentifier().getCarbonTableIdentifier().getTableUniqueName();
    List<TableDataMap> tableIndices = allDataMaps.get(tableUniqueName);
    TableDataMap dataMap = null;
    if (tableIndices != null) {
      dataMap = getTableDataMap(dataMapSchema.getDataMapName(), tableIndices);
    }
    if (dataMap == null) {
      synchronized (tableUniqueName.intern()) {
        // Re-check under the lock: another thread may have created it.
        tableIndices = allDataMaps.get(tableUniqueName);
        if (tableIndices != null) {
          dataMap = getTableDataMap(dataMapSchema.getDataMapName(), tableIndices);
        }
        if (dataMap == null) {
          try {
            dataMap = createAndRegisterDataMap(table, dataMapSchema);
          } catch (Exception e) {
            throw new RuntimeException(e);
          }
        }
      }
    }
    if (dataMap == null) {
      throw new RuntimeException("Datamap does not exist");
    }
    return dataMap;
  }

  /**
   * Return a new datamap factory instance for the given schema. The provider
   * name is first treated as a fully-qualified factory class name; if the
   * class is not found it is resolved as a registered short name.
   */
  public DataMapFactory getDataMapFactoryClass(CarbonTable table, DataMapSchema dataMapSchema)
      throws MalformedDataMapCommandException {
    try {
      // try to create datamap by reflection to test whether it is a valid DataMapFactory class
      return (DataMapFactory)
          Class.forName(dataMapSchema.getProviderName()).getConstructors()[0]
              .newInstance(table, dataMapSchema);
    } catch (ClassNotFoundException e) {
      // try to create DataMapClassProvider instance by taking providerName as short name
      return DataMapRegistry.getDataMapFactoryByShortName(table, dataMapSchema);
    } catch (Throwable e) {
      throw new MetadataProcessException(
          "failed to get DataMap factory for'" + dataMapSchema.getProviderName() + "'", e);
    }
  }

  /**
   * Creates a datamap from the schema and registers it in the store manager.
   * The datamap is created using datamap name, datamap factory class and table identifier.
   */
  // TODO: make it private
  public TableDataMap createAndRegisterDataMap(CarbonTable table,
      DataMapSchema dataMapSchema) throws MalformedDataMapCommandException {
    DataMapFactory dataMapFactory = getDataMapFactoryClass(table, dataMapSchema);
    return registerDataMap(table, dataMapSchema, dataMapFactory);
  }

  /**
   * Wraps the factory in a TableDataMap and adds it to the per-table list.
   * If the factory cannot fetch blocklet details itself, the default
   * blocklet datamap's fetcher is used.
   */
  public TableDataMap registerDataMap(CarbonTable table,
      DataMapSchema dataMapSchema, DataMapFactory dataMapFactory) {
    String tableUniqueName = table.getCarbonTableIdentifier().getTableUniqueName();
    // Just update the segmentRefreshMap with the table if not added.
    getTableSegmentRefresher(table);
    List<TableDataMap> tableIndices = allDataMaps.get(tableUniqueName);
    if (tableIndices == null) {
      tableIndices = new ArrayList<>();
    }

    BlockletDetailsFetcher blockletDetailsFetcher;
    SegmentPropertiesFetcher segmentPropertiesFetcher = null;
    if (dataMapFactory instanceof BlockletDetailsFetcher) {
      blockletDetailsFetcher = (BlockletDetailsFetcher) dataMapFactory;
    } else {
      blockletDetailsFetcher = getBlockletDetailsFetcher(table);
    }
    // NOTE(review): assumes every BlockletDetailsFetcher also implements
    // SegmentPropertiesFetcher — confirm for custom factories.
    segmentPropertiesFetcher = (SegmentPropertiesFetcher) blockletDetailsFetcher;
    TableDataMap dataMap = new TableDataMap(table.getAbsoluteTableIdentifier(),
        dataMapSchema, dataMapFactory, blockletDetailsFetcher, segmentPropertiesFetcher);

    tableIndices.add(dataMap);
    allDataMaps.put(tableUniqueName, tableIndices);
    return dataMap;
  }

  /** Linear lookup of a datamap by name within a table's datamap list. */
  private TableDataMap getTableDataMap(String dataMapName, List<TableDataMap> tableIndices) {
    TableDataMap dataMap = null;
    for (TableDataMap tableDataMap : tableIndices) {
      if (tableDataMap.getDataMapSchema().getDataMapName().equals(dataMapName)) {
        dataMap = tableDataMap;
        break;
      }
    }
    return dataMap;
  }

  /**
   * Clear the invalid segments from all the datamaps of the table
   * @param carbonTable table to clean
   * @param segments invalid segments to clear
   */
  public void clearInvalidSegments(CarbonTable carbonTable, List<Segment> segments)
      throws IOException {
    getDefaultDataMap(carbonTable).clear(segments);
    List<TableDataMap> allDataMap = getAllDataMap(carbonTable);
    for (TableDataMap dataMap: allDataMap) {
      dataMap.clear(segments);
    }
  }

  /**
   * Clear the datamap/datamaps of a table from memory
   *
   * @param identifier Table identifier
   */
  public void clearDataMaps(AbsoluteTableIdentifier identifier) {
    CarbonTable carbonTable = getCarbonTable(identifier);
    String tableUniqueName = identifier.getCarbonTableIdentifier().getTableUniqueName();
    List<TableDataMap> tableIndices = allDataMaps.get(tableUniqueName);
    if (null != carbonTable && tableIndices != null) {
      try {
        // Also clear distributed datamap state via the datamap job.
        DataMapUtil.executeDataMapJobForClearingDataMaps(carbonTable);
      } catch (IOException e) {
        LOGGER.error(e, "clear dataMap job failed");
        // ignoring the exception
      }
    }
    segmentRefreshMap.remove(identifier.uniqueName());
    clearDataMaps(tableUniqueName);
    allDataMaps.remove(tableUniqueName);
  }

  /**
   * This method returns the carbonTable from identifier, looking it up in the
   * metadata cache first and falling back to building it from the table path.
   * @param identifier table identifier
   * @return the table, or null if it cannot be resolved
   */
  public CarbonTable getCarbonTable(AbsoluteTableIdentifier identifier) {
    CarbonTable carbonTable = null;
    carbonTable = CarbonMetadata.getInstance()
        .getCarbonTable(identifier.getDatabaseName(), identifier.getTableName());
    if (carbonTable == null) {
      try {
        carbonTable = CarbonTable
            .buildFromTablePath(identifier.getTableName(), identifier.getDatabaseName(),
                identifier.getTablePath(), identifier.getCarbonTableIdentifier().getTableId());
      } catch (IOException e) {
        LOGGER.error("failed to get carbon table from table Path");
        // ignoring exception
      }
    }
    return carbonTable;
  }

  /**
   * this methods clears the datamap of table from memory
   */
  public void clearDataMaps(String tableUniqName) {
    List<TableDataMap> tableIndices = allDataMaps.get(tableUniqName);
    if (tableIndices != null) {
      for (TableDataMap tableDataMap : tableIndices) {
        if (tableDataMap != null) {
          // clear the segmentMap in BlockletDetailsFetcher,else the Segment will remain in executor
          // and the query fails as we will check whether the blocklet contains in the index or not
          tableDataMap.getBlockletDetailsFetcher().clear();
          tableDataMap.clear();
        }
      }
    }
    allDataMaps.remove(tableUniqName);
  }

  /**
   * Clear the datamap/datamaps of a table from memory and disk
   *
   * @param identifier Table identifier
   * @param dataMapName name of the datamap to remove (case-insensitive)
   */
  public void clearDataMap(AbsoluteTableIdentifier identifier, String dataMapName) {
    CarbonTable carbonTable = getCarbonTable(identifier);
    String tableUniqueName = identifier.getCarbonTableIdentifier().getTableUniqueName();
    List<TableDataMap> tableIndices = allDataMaps.get(tableUniqueName);
    if (tableIndices != null) {
      int i = 0;
      for (TableDataMap tableDataMap : tableIndices) {
        if (carbonTable != null && tableDataMap != null && dataMapName
            .equalsIgnoreCase(tableDataMap.getDataMapSchema().getDataMapName())) {
          try {
            DataMapUtil.executeDataMapJobForClearingDataMaps(carbonTable);
            tableDataMap.clear();
          } catch (IOException e) {
            LOGGER.error(e, "clear dataMap job failed");
            // ignoring the exception
          }
          // Removes the on-disk datamap data as well, then drops the entry
          // from the list by index (loop exits immediately after).
          tableDataMap.deleteDatamapData();
          tableIndices.remove(i);
          break;
        }
        i++;
      }
      allDataMaps.put(tableUniqueName, tableIndices);
    }
  }

  /**
   * is datamap exist
   * @return true if exist, else return false
   */
  public boolean isDataMapExist(String dbName, String tableName, String dmName) {
    List<TableDataMap> tableDataMaps = allDataMaps.get(dbName + '_' + tableName);
    if (tableDataMaps != null) {
      for (TableDataMap dm : tableDataMaps) {
        if (dm != null && dmName.equalsIgnoreCase(dm.getDataMapSchema().getDataMapName())) {
          return true;
        }
      }
    }
    return false;
  }

  /**
   * Get the blocklet datamap factory to get the detail information of blocklets
   *
   * @param table table whose default blocklet datamap supplies the fetcher
   * @return the default datamap's factory, as a details fetcher
   */
  private BlockletDetailsFetcher getBlockletDetailsFetcher(CarbonTable table) {
    TableDataMap blockletMap = getDataMap(table, BlockletDataMapFactory.DATA_MAP_SCHEMA);
    return (BlockletDetailsFetcher) blockletMap.getDataMapFactory();
  }

  /**
   * Returns the singleton instance
   *
   * @return the process-wide DataMapStoreManager
   */
  public static DataMapStoreManager getInstance() {
    return instance;
  }

  /**
   * Get the TableSegmentRefresher for the table. If not existed then add one and return.
   */
  public TableSegmentRefresher getTableSegmentRefresher(CarbonTable table) {
    String uniqueName = table.getAbsoluteTableIdentifier().uniqueName();
    if (segmentRefreshMap.get(uniqueName) == null) {
      segmentRefreshMap.put(uniqueName, new TableSegmentRefresher(table));
    }
    return segmentRefreshMap.get(uniqueName);
  }

  /**
   * Keep track of the segment refresh time.
   */
  public static class TableSegmentRefresher {

    // This map stores the latest segment refresh time.So in case of update/delete we check the
    // time against this map.
    private Map<String, SegmentRefreshInfo> segmentRefreshTime = new HashMap<>();

    // This map keeps the manual refresh entries from users. It is mainly used for partition
    // altering.
    private Map<String, Boolean> manualSegmentRefresh = new HashMap<>();

    // Seeds segmentRefreshTime from the table's current update status.
    TableSegmentRefresher(CarbonTable table) {
      SegmentUpdateStatusManager statusManager = new SegmentUpdateStatusManager(table);
      SegmentUpdateDetails[] updateStatusDetails = statusManager.getUpdateStatusDetails();
      for (SegmentUpdateDetails updateDetails : updateStatusDetails) {
        UpdateVO updateVO = statusManager.getInvalidTimestampRange(updateDetails.getSegmentName());
        segmentRefreshTime.put(updateVO.getSegmentId(),
            new SegmentRefreshInfo(updateVO.getCreatedOrUpdatedTimeStamp(), 0));
      }
    }

    /**
     * Whether the segment's datamap must be refreshed: true for a newly seen
     * updated segment, for a pending manual refresh (consumed here), or when
     * the segment's refresh info is newer than the recorded one.
     */
    public boolean isRefreshNeeded(Segment seg, UpdateVO updateVo) throws IOException {
      SegmentRefreshInfo segmentRefreshInfo =
          seg.getSegmentRefreshInfo(updateVo);
      String segmentId = seg.getSegmentNo();
      if (segmentRefreshTime.get(segmentId) == null
          && segmentRefreshInfo.getSegmentUpdatedTimestamp() != null) {
        segmentRefreshTime.put(segmentId, segmentRefreshInfo);
        return true;
      }

      if (manualSegmentRefresh.get(segmentId) != null && manualSegmentRefresh.get(segmentId)) {
        manualSegmentRefresh.put(segmentId, false);
        return true;
      }

      boolean isRefresh = segmentRefreshInfo.compare(segmentRefreshTime.get(segmentId));
      if (isRefresh) {
        segmentRefreshTime.remove(segmentId);
      }
      return isRefresh;
    }

    /** Flags the given segments so the next isRefreshNeeded returns true. */
    public void refreshSegments(List<String> segmentIds) {
      for (String segmentId : segmentIds) {
        manualSegmentRefresh.put(segmentId, true);
      }
    }

    /** Consumes and returns any pending manual-refresh flag for the segment. */
    public boolean isRefreshNeeded(String segmentId) {
      if (manualSegmentRefresh.get(segmentId) != null && manualSegmentRefresh.get(segmentId)) {
        manualSegmentRefresh.put(segmentId, false);
        return true;
      } else {
        return false;
      }
    }
  }

}
| jatin9896/incubator-carbondata | core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java | Java | apache-2.0 | 23,372 |
# Message factory bound to the "plone" i18n domain, used for translatable
# strings in this package.
from zope.i18nmessageid import MessageFactory
PloneMessageFactory = MessageFactory('plone')

# Declare the portlet-addition permission and grant it to the usual
# management roles by default.
from Products.CMFCore.permissions import setDefaultRoles
setDefaultRoles('signature.portlets.gdsignature: Add GroupDocs Signature portlet',
                ('Manager', 'Site Administrator', 'Owner',))
| liosha2007/plone-groupdocs-signature-source | src/groupdocs/signature/portlets/__init__.py | Python | apache-2.0 | 294 |
package com.michaelfotiadis.crossyscore.ui.components.addplayer.avatar;
import android.view.View;
import android.widget.ImageView;
import com.michaelfotiadis.crossyscore.R;
import com.michaelfotiadis.crossyscore.ui.core.common.viewholder.BaseViewHolder;
import butterknife.Bind;
/**
 * View holder for a single avatar image row, backed by the
 * {@code list_item_single_image} layout.
 */
public final class ListAvatarViewHolder extends BaseViewHolder {
    private static final int LAYOUT_ID = R.layout.list_item_single_image;

    // Injected by ButterKnife (binding presumably performed by
    // BaseViewHolder — confirm against the base class).
    @Bind(R.id.image)
    protected ImageView image;

    public ListAvatarViewHolder(final View view) {
        super(view);
    }

    /** Returns the layout resource to inflate for this holder. */
    public static int getLayoutId() {
        return LAYOUT_ID;
    }
}
# -*- coding: utf-8 -*-
#
# File: src/webframe/management/commands/pref.py
# Date: 2020-04-22 21:35
# Author: Kenson Man <kenson@kenson.idv.hk>
# Desc: Import / Create / Update / Delete preference
#
from django.conf import settings
from django.contrib.auth import get_user_model
from django.core.management.base import BaseCommand, CommandError
from django.db import transaction
from django.db.models import Q
from pathlib import Path
from webframe.functions import TRUE_VALUES, LogMessage as lm, getTime
from webframe.models import Preference, AbstractPreference
from uuid import UUID
import logging, os, glob, sys, re
logger=logging.getLogger('webframe.commands.prefs')
class Command(BaseCommand):
    help = '''Manipulate the preference in database. Including insert/update/delete/view/import/gensecret/gendoc; Importing support csv|xlsx file.'''

    def __getIndent__(self, indent=0, ch=' '):
        '''Return an indentation prefix of ``indent`` copies of ``ch``.'''
        return ch*indent

    def create_parser(self, cmdName, subcommand, **kwargs):
        '''Attach a usage epilog with concrete examples to the default parser.'''
        parser=super().create_parser(cmdName, subcommand, **kwargs)
        parser.epilog='''Example:\r\n
\tpref import path_to_prefs #Import a folder or a csv/xlsx file\r\n
\tpref set ABC --value="def" #Set the preference "ABC" to value "def"\r\n
\tpref gensecret #Generate the encryption secret; PLEASE backup in secure way.\r\n
\tpref gendoc prefsDoc.html #Generate the documentation and save as as output.html
'''
        return parser

    def add_arguments(self, parser):
        '''Declare the positional action/name and all option flags.'''
        #Default Value
        pattern='Pref({pref.id}:{pref.name}): {pref.value}'
        action='show'
        max=256
        wildcard='*'
        tmpl='webframe/prefsDoc.html'
        #Adding arguments
        parser.add_argument('action', type=str, help='The action to be taken. One of import/export/show/set/delete/gensecret/gendoc; Default is {0}'.format(action), default=action)
        parser.add_argument('name', type=str, nargs='?', help='[import/export/show/set/delete/gendoc]; The name of the preference or path of importing/exporting file (csv|xlsx);')
        parser.add_argument('--file', dest='file', type=str, help='[import/export/gendoc]; The file path for import/export/output.')
        parser.add_argument('--value', dest='value', type=str, help='[set/delete]; The value of the preference;', default=None)
        parser.add_argument('--owner', dest='owner', type=str, help='[set/delete]; The owner of the preference; Optional;', default=None)
        parser.add_argument('--noowner', dest='noowner', action='store_true', help='[show/set/delete]; The target preference has no owner; Optional; Default False')
        parser.add_argument('--parent', dest='parent', type=str, help='[show/set/delete]; The parent\'s name of the preference. Optional;', default=None)
        parser.add_argument('--noparent', dest='noparent', action='store_true', help='[show/set/delete]; The target preference has no parent; Optional; Default False')
        parser.add_argument('--pattern', dest='pattern', type=str, help='[show]; The output pattern. {0}'.format(pattern), default=pattern)
        parser.add_argument('--max', dest='max', type=int, help='[show]; The maximum number of preference to show. Default is {0}'.format(max), default=max)
        parser.add_argument('--wildcard', dest='wildcard', type=str, help='[show]; Specify the wildcard; Default is {0}'.format(wildcard), default=wildcard)
        #Importing
        parser.add_argument('--sep', dest='separator', type=str, default=',', help='[import]; The separator when CSV importing; Default \",\"')
        parser.add_argument('--encoding', dest='encoding', type=str, default='utf-8', help='[import]; The encoding when CSV importing; Default \"utf-8\"')
        parser.add_argument('--quotechar', dest='quotechar', type=str, default='\"', help='[import]; The quote-char when CSV importing; Default double quote: \"')
        parser.add_argument('--filepath', dest='filepath', action='store_true', help='[import]; Import the file-path in preferences; Default False')
        # BUGFIX: the short option used to be '-f ' (trailing space), which is
        # not typable from a shell; registered as plain '-f'.
        parser.add_argument('--force', '-f', dest='force', action='store_true', help='[import]; Force the import', default=False)
        #Generate Doc
        parser.add_argument('--tmpl', dest='tmpl', type=str, help="[gendoc]; The template name when generating document; Default: {0}".format(tmpl), default=tmpl)

    def __get_owner__(self, owner=None):
        '''
        Resolve the owning user. Falls back to the --owner command-line
        option when no explicit owner is given; returns None when neither
        is set.
        '''
        # BUGFIX: apply the --owner fallback *before* the null-check. It used
        # to sit after an early "return None", making it dead code and
        # silently ignoring the --owner option for set/delete.
        owner=owner if owner else self.kwargs.get('owner')
        if not owner: return None
        logger.debug('Getting owner by: "%s"', owner)
        return get_user_model().objects.get(username=owner)

    def __get_parent__(self, parent=None):
        '''Resolve the parent preference by uuid first, then by name; None when not found.'''
        parent=parent if parent else self.kwargs['parent']
        if parent:
            try:
                #Get parent by uuid
                return Preference.objects.get(id=parent)
            except:
                try:
                    #Get parent by name
                    return Preference.objects.get(name=parent)
                except:
                    pass
        return None

    def __get_pref__(self, **kwargs):
        '''
        Build the preference queryset selected by the current options,
        optionally overridden by the given keyword arguments.
        '''
        owner=kwargs['owner'] if 'owner' in kwargs else self.__get_owner__()
        parent=kwargs['parent'] if 'parent' in kwargs else self.__get_parent__()
        name=kwargs['name'] if 'name' in kwargs else self.kwargs['name']
        lang=kwargs['lang'] if 'lang' in kwargs else None
        if self.kwargs['filepath']: name=os.path.basename(name)
        if self.kwargs['parent'] and parent==None:
            raise Preference.DoesNotExist('Parent Preference not found: {0}'.format(self.kwargs['parent']))
        rst=Preference.objects.all()
        if name and name!='*':
            rst=rst.filter(name=name)
        if owner:
            rst=rst.filter(owner=owner)
        elif self.kwargs['noowner']:
            rst=rst.filter(owner__isnull=True)
        if parent:
            rst=rst.filter(parent=parent)
        elif self.kwargs['noparent']:
            rst=rst.filter(parent__isnull=True)
        if self.kwargs['filepath']:
            rst=rst.filter(tipe=AbstractPreference.TYPE_FILEPATH)
        rst=rst.order_by('owner', 'parent', 'sequence', 'name')
        return rst

    def __get_name__( self, name ):
        '''
        Get the name and sequence according to the name.
        @param name The string including the sequence and name. For example, '01.Target' will return a tuple (1, 'Target')
        @return A tuple including the sequence and the name
        '''
        p=re.search(r'^\d+\.', name)
        if p:
            s=p.group(0)
            return name[len(s):].strip(), int(name[0:len(s)-1])
        return (name, sys.maxsize if hasattr(sys, 'maxsize') else sys.maxint) #Default append

    def output( self, pref, pattern=None ):
        '''Print the preference and, recursively indented, its children.'''
        pattern=pattern if pattern else self.kwargs['pattern']
        print(pattern.format(pref=pref))
        pattern='    {0}'.format(pattern)
        for ch in pref.childs:
            self.output(ch, pattern)

    def handle(self, *args, **kwargs):
        '''Dispatch to the requested action after configuring verbosity.'''
        verbosity=int(kwargs['verbosity'])
        if verbosity==3:
            logger.setLevel(logging.DEBUG)
        elif verbosity==2:
            logger.setLevel(logging.INFO)
        elif verbosity==1:
            logger.setLevel(logging.WARNING)
        else:
            logger.setLevel(logging.ERROR)
        self.kwargs=kwargs
        action=kwargs['action']
        if action=='import':
            self.imp()
        elif action=='create': #for backward compatibility
            self.set()
        elif action=='update': #for backward compatibility
            self.set()
        elif action=='set':
            self.set()
        elif action=='delete':
            self.delete()
        elif action=='show':
            self.show()
        elif action=='gensecret':
            self.gensecret()
        elif action=='gendoc':
            self.gendoc()
        elif action=='export':
            self.expCsv()
        else:
            logger.warning('Unknown action: {0}'.format(action))
        # BUGFIX: logger.warn is deprecated in the logging module.
        logger.warning('DONE!')

    def show(self):
        '''List the preferences matching the name/value/owner/parent filters.'''
        logger.info('Showing the preference ...')
        q=Preference.objects.all()
        if self.kwargs['name']:
            logger.info('    with the name filter: {0}'.format(self.kwargs['name']))
            if self.kwargs['wildcard'] in self.kwargs['name']:
                q=q.filter(name__icontains=self.kwargs['name'].replace(self.kwargs['wildcard'], ''))
            else:
                q=q.filter(name=self.kwargs['name'])
        if self.kwargs['value']:
            logger.info('    with the value filter: {0}'.format(self.kwargs['value']))
            q=q.filter(value__icontains=self.kwargs['value'])
        if self.kwargs['owner']:
            logger.info('    which belongs to user: {0}'.format(self.kwargs['owner']))
            q=q.filter(owner__username=self.kwargs['owner'])
        if self.kwargs['parent']:
            logger.info('    which belongs to preference: {0}'.format(self.kwargs['parent']))
            q=q.filter(parent__name__iexact=self.kwargs['parent'])
        else:
            q=q.filter(parent__isnull=True)
        for p in q:
            self.output(p)
        logger.warning('There have {0} preference(s) has been shown'.format(len(q)))

    def set(self):
        '''Update the matching preference(s), or create one when none exists.'''
        with transaction.atomic():
            try:
                pref=self.__get_pref__()
                if pref.count()<1: raise Preference.DoesNotExist
                cnt=pref.update(value=self.kwargs['value'])
                logger.info('{0} of Preference(s) has been updated'.format(cnt))
            except Preference.DoesNotExist:
                # BUGFIX: owner/parent were referenced here without ever being
                # defined, raising NameError whenever a brand-new preference
                # had to be created.
                owner=self.__get_owner__()
                parent=self.__get_parent__()
                p=Preference(name=self.kwargs['name'], value=self.kwargs['value'], owner=owner, parent=parent)
                p.save()
                logger.info('The preference<{0}> has been created with value: {1}'.format(p.name, p.value))

    def delete(self):
        '''Delete every preference matched by the current filters.'''
        pref=self.__get_pref__()
        cnt=pref.count()
        pref.delete()
        logger.warning('{0} of Preference(s) has been deleted'.format(cnt))

    def expRow( self, wr, pref, indent=0 ):
        '''
        Export the specified preference (and its children) to csv.
        @return the number of rows written
        '''
        cnt=0
        tab=self.__getIndent__(indent)
        logger.debug(lm('{0}Exporting preference: {1}::{2}...', tab, pref.id, pref.name))
        wr.writerow([
            pref.name                                 # [0]
            , pref.realValue                          # [1]
            , pref.parent.id if pref.parent else ''   # [2]
            , pref.owner.username if pref.owner else '' # [3]
            , pref.helptext                           # [4]
            , Preference.TYPES[pref.tipe][1]          # [5]
            , pref.encrypted                          # [6]
            , pref.regex                              # [7]
        ])
        cnt+=1
        for p in pref.childs:
            cnt+=self.expRow(wr, p, indent+3)
        return cnt

    def expCsv( self ):
        '''
        Export the specified list of preferences to csv.
        '''
        import csv
        f=self.kwargs['file']
        with open(f, 'w', encoding=self.kwargs['encoding']) as fp:
            wr=csv.writer(fp, delimiter=self.kwargs['separator'], quotechar=self.kwargs['quotechar'], quoting=csv.QUOTE_MINIMAL, skipinitialspace=True)
            cnt=0
            for p in self.__get_pref__():
                cnt+=self.expRow(wr, p, 0)
        logger.info(lm('Exported {0} records', cnt))

    def improw( self, cols, idx=0 ):
        '''
        Import a single row of columns: [name, value, parent, owner,
        helptext, type, encrypted, regex, lang]; trailing columns optional.
        '''
        try:
            name=cols[0]
            val=cols[1]
            parent=self.__get_parent__(cols[2])
            owner=self.__get_owner__(cols[3])
            helptext=cols[4] if len(cols)>4 else None
            tipe=cols[5] if len(cols)>5 else None
            encrypted=(cols[6] in TRUE_VALUES) if len(cols)>6 else False
            # BUGFIX: xlsx imports supply only 7 columns, so the previous
            # unconditional cols[7] raised IndexError for every sheet row.
            regex=cols[7] if len(cols)>7 else None
            lang=cols[8] if len(cols)>8 else None
            logger.debug('    Importing row: {0}: {1} [{2}]'.format(idx, name, 'encrypted' if encrypted else 'clear-text'))
            self.kwargs['name']=name
            pref=self.__get_pref__(name=name, owner=owner, parent=parent, lang=lang)
            if pref.count()<1: raise Preference.DoesNotExist
            for p in pref:
                p.encrypted=encrypted
                p.helptext=helptext
                p.tipe=tipe
                p.regex=regex
                #The value must be the last steps to set due to validation. Otherwise, once importing/assign a new value into this field, the last validation rule may be applied incorrectly
                p.value=val
                p.save()
        except Preference.DoesNotExist:
            Preference(name=name, _value=val, owner=owner, parent=parent, encrypted=encrypted, helptext=helptext, regex=regex, lang=lang).save()
        except:
            logger.debug(cols)
            logger.exception('Error when handling the column')
            raise

    def impXlsx( self, f ):
        '''
        Import xlsx file (first worksheet; optional header row skipped).
        '''
        from openpyxl import load_workbook
        wb=load_workbook(filename=f)
        ws=wb.active
        logger.info('  Importing worksheet: {0}!{1}'.format(f, ws.title))
        cnt=0
        with transaction.atomic():
            for r in range(1, ws.max_row+1):
                cols=list()
                name=ws.cell(row=r, column=1).value
                if isinstance(name, str): name=name.strip()
                if not name: continue #Skip the row when it has no pref.name
                if r==1 and (name.upper()=='ID' or name.upper()=='NAME' or name.upper()=='ID/Name'): continue #Skip the first row if header row
                cols.append(name)                            #Name/ID
                cols.append(ws.cell(row=r, column=2).value)  #Value
                cols.append(ws.cell(row=r, column=3).value)  #Parent
                cols.append(ws.cell(row=r, column=4).value)  #Owner
                cols.append(ws.cell(row=r, column=5).value)  #Reserved
                cols.append(ws.cell(row=r, column=6).value)  #Tipe
                cols.append(ws.cell(row=r, column=7).value)  #encrypted
                cols.append(ws.cell(row=r, column=8).value)  #regex (empty cell => None)
                self.improw( cols, r )
                cnt+=1
        logger.info('  Imported {0} row(s)'.format(cnt))

    def impCsv( self, f ):
        '''
        Import the csv file (optional header row skipped).
        '''
        import csv
        with transaction.atomic():
            logger.info('  Importing csv: {0}'.format(f))
            cnt=0
            with open(f, 'r', encoding=self.kwargs['encoding']) as fp:
                if self.kwargs['quotechar']:
                    rows=csv.reader(fp, delimiter=self.kwargs['separator'], quotechar=self.kwargs['quotechar'], quoting=csv.QUOTE_MINIMAL, skipinitialspace=True)
                else:
                    rows=csv.reader(fp, delimiter=self.kwargs['separator'], quoting=csv.QUOTE_NONE, skipinitialspace=True)
                for row in rows:
                    if len(row)<1: continue #Skip the empty row
                    name=row[0].strip()
                    if not name: continue #Skip the row when it has no name
                    if cnt==0 and (name.upper()=='ID' or name.upper()=='NAME' or name.upper()=='ID/NAME'): continue #Skip the first row if header row
                    self.improw( row, cnt )
                    cnt+=1
            logger.info('  Imported {0} row(s)'.format(cnt))

    def impdir( self, d ):
        '''Import every not-yet-imported *.xlsx / *.csv file inside the directory.'''
        if os.path.isdir(d):
            logger.info('Importing directory: {0}'.format(d))
        else:
            logger.warning('This is not the directory: {0}'.format(d))
            return
        cnt=0
        with transaction.atomic():
            # Bookkeeping node remembering which files were already imported.
            p=Preference.objects.pref('IMPORTED_PREFERENCES', returnValue=False)
            p.helptext='<p>Sysetm use only! <strong>DO NOT MODIFY</strong> youself unless you understand the risk.</p>'
            p.save()
            for f in os.listdir(d):
                if not (f.upper().endswith('.XLSX') or f.upper().endswith('.CSV')): continue #only support *.xlsx and *.csv
                f=os.path.join(d, f)
                try:
                    Preference.objects.get(name=f, parent=p)
                    if self.kwargs['force']: raise Preference.DoesNotExist
                except Preference.DoesNotExist:
                    self.impfile( f )
                    cnt+=1
                    Preference(name=f, parent=p).save()
        logger.debug('Imported {0} file(s)'.format(cnt))

    def impfile( self, f ):
        '''Import a single readable *.xlsx or *.csv file.'''
        if not (os.path.isfile(f) and os.access(f, os.R_OK)):
            logger.warning('The file is not readable: {0}'.format(f))
            return
        fn=f.lower()
        if fn.endswith('.xlsx'):
            self.impXlsx(f)
        elif fn.endswith('.csv'):
            self.impCsv(f)
        else:
            logger.info('Unsupported file: {0}'.format(f))

    def imppath( self, p, parent=None):
        '''Mirror a filesystem tree into TYPE_FILEPATH preferences.'''
        name, seq=self.__get_name__(os.path.basename(p))
        if os.path.isdir(p):
            try:
                pref=self.__get_pref__(name=name)
                if pref.count()<1: raise Preference.DoesNotExist
                pref=pref[0]
            except Preference.DoesNotExist:
                pref=Preference(name=name, parent=parent)
            pref.tipe=AbstractPreference.TYPE_FILEPATH
            pref.sequence=seq
            pref.save()
            for f in os.listdir(p):
                path=os.path.join(p, f)
                self.imppath(path, pref)
            #Handling the ordering after import all the childs
            ord=1
            for c in pref.childs:
                c.sequence=ord
                c.save()
                ord+=1
        else:
            try:
                pref=self.__get_pref__(name=name)
                if pref.count()<1: raise Preference.DoesNotExist
                pref=pref[0]
            except Preference.DoesNotExist:
                pref=Preference(name=name, parent=parent)
            pref.pathValue=p if os.path.isabs(p) else os.path.abspath(p)
            pref.tipe=AbstractPreference.TYPE_FILEPATH
            pref.sequence=seq
            pref.save()

    def imp(self):
        '''Import preferences from a file, directory or filesystem path.'''
        disableOrder=getattr(settings, 'DISABLE_REORDER', False)
        setattr(settings, 'DISABLE_REORDER', True) #Disable the re-ordering features during importing
        try:
            f=self.kwargs['file']
            if self.kwargs['filepath']:
                self.imppath(f)
            elif os.path.isdir(f):
                self.impdir(f)
            elif os.path.isfile(f):
                self.impfile(f)
        finally:
            setattr(settings, 'DISABLE_REORDER', disableOrder) #Resume the re-ordering features after importing

    def gensecret(self):
        '''Generate and print the encryption secret; back it up securely.'''
        from webframe.models import AbstractPreference
        key=AbstractPreference.__getSecret__()
        logger.warning(lm('Your secret is: {0}', key))

    def gendoc(self):
        '''Render the selected preference tree into an HTML document.'''
        from django.shortcuts import render
        from django.template import loader, Template, Context
        from webframe.providers import template_injection, fmt_injection
        # BUGFIX: self.kwargs is a dict, so getattr() always returned the
        # fallback and the --tmpl option was silently ignored.
        tmpl=self.kwargs.get('tmpl') or 'webframe/prefDoc.html'
        logger.warning(lm('Generating the documents according template: {0}', tmpl))
        tmpl=loader.get_template(tmpl)
        params=dict()
        params.update(template_injection(None))
        params.update(fmt_injection(None))
        #params['target']=Preference.objects.filter(parent__isnull=True)
        params['target']=self.__get_pref__()
        params['TYPES']=Preference.TYPES
        params['now']=getTime('now')
        txt=tmpl.render(params)
        output=self.kwargs.get('file')
        if not output: output='prefsDoc.html'
        logger.warning(lm('Generated! Outputing into: {0}', output))
        with open(output, 'w') as f:
            f.write(txt)
| kensonman/webframe | management/commands/pref.py | Python | apache-2.0 | 18,742 |
// --------------------------------------------------------------------------------------------------------------------
// <copyright file="HtmlFeatureFormatter.cs" company="PicklesDoc">
// Copyright 2011 Jeffrey Cameron
// Copyright 2012-present PicklesDoc team and community contributors
//
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
using System;
using System.Linq;
using System.Xml.Linq;
using PicklesDoc.Pickles.ObjectModel;
namespace PicklesDoc.Pickles.DocumentationBuilders.HTML
{
/// <summary>
/// Renders a <see cref="Feature"/> as an XHTML fragment: result image,
/// title, sorted tags, description, then background/scenarios/outlines.
/// </summary>
public class HtmlFeatureFormatter : IHtmlFeatureFormatter
{
    private readonly HtmlDescriptionFormatter htmlDescriptionFormatter;

    private readonly HtmlImageResultFormatter htmlImageResultFormatter;

    private readonly HtmlScenarioFormatter htmlScenarioFormatter;

    private readonly HtmlScenarioOutlineFormatter htmlScenarioOutlineFormatter;

    private readonly XNamespace xmlns;

    public HtmlFeatureFormatter(
        HtmlScenarioFormatter htmlScenarioFormatter,
        HtmlDescriptionFormatter htmlDescriptionFormatter,
        HtmlScenarioOutlineFormatter htmlScenarioOutlineFormatter,
        HtmlImageResultFormatter htmlImageResultFormatter)
    {
        this.xmlns = HtmlNamespace.Xhtml;
        this.htmlScenarioFormatter = htmlScenarioFormatter;
        this.htmlDescriptionFormatter = htmlDescriptionFormatter;
        this.htmlScenarioOutlineFormatter = htmlScenarioOutlineFormatter;
        this.htmlImageResultFormatter = htmlImageResultFormatter;
    }

    #region IHtmlFeatureFormatter Members

    /// <summary>Builds the feature's container div element.</summary>
    /// <param name="feature">Feature to render.</param>
    /// <returns>An XHTML div with id "feature".</returns>
    public XElement Format(Feature feature)
    {
        var featureDiv = new XElement(
            this.xmlns + "div",
            new XAttribute("id", "feature"),
            this.htmlImageResultFormatter.Format(feature),
            new XElement(this.xmlns + "h1", feature.Name));

        string[] tags = RetrieveTags(feature);
        if (tags.Length != 0)
        {
            // Tags are rendered alphabetically inside a "tags" paragraph.
            var sortedTags = tags.OrderBy(t => t).ToArray();
            var tagParagraph = new XElement(
                this.xmlns + "p",
                HtmlScenarioFormatter.CreateTagElements(sortedTags, this.xmlns));
            tagParagraph.Add(new XAttribute("class", "tags"));
            featureDiv.Add(tagParagraph);
        }

        featureDiv.Add(this.htmlDescriptionFormatter.Format(feature.Description));

        var scenarioList = new XElement(this.xmlns + "ul", new XAttribute("id", "scenarios"));
        int elementId = 0;

        // The background, when present, is rendered first and shares the
        // running element id sequence with the scenarios.
        if (feature.Background != null)
        {
            scenarioList.Add(this.htmlScenarioFormatter.Format(feature.Background, elementId++));
        }

        foreach (IFeatureElement element in feature.FeatureElements)
        {
            var asScenario = element as Scenario;
            if (asScenario != null)
            {
                scenarioList.Add(this.htmlScenarioFormatter.Format(asScenario, elementId++));
                continue;
            }

            var asOutline = element as ScenarioOutline;
            if (asOutline != null)
            {
                scenarioList.Add(this.htmlScenarioOutlineFormatter.Format(asOutline, elementId++));
            }
        }

        featureDiv.Add(scenarioList);

        return featureDiv;
    }

    #endregion

    /// <summary>Returns the feature's tags, or an empty array for null input.</summary>
    private static string[] RetrieveTags(Feature feature)
    {
        return feature == null ? new string[0] : feature.Tags.ToArray();
    }
}
}
| ludwigjossieaux/pickles | src/Pickles/Pickles/DocumentationBuilders/HTML/HtmlFeatureFormatter.cs | C# | apache-2.0 | 4,200 |
/*
* Copyright 2016 Crown Copyright
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package stroom.util.client;
import com.google.gwt.json.client.JSONArray;
import com.google.gwt.json.client.JSONNumber;
import com.google.gwt.json.client.JSONObject;
import com.google.gwt.json.client.JSONParser;
import com.google.gwt.json.client.JSONString;
import com.google.gwt.json.client.JSONValue;
/**
 * Null-safe helpers for unwrapping GWT {@code JSONValue} instances into
 * plain Java values. Every accessor returns {@code null} (or an empty
 * array) when the input is {@code null} or of the wrong JSON type.
 */
public class JSONUtil {
    private JSONUtil() {
        // Static utility holder; never instantiated.
    }

    /** Parses the text strictly; returns null for null or empty input. */
    public static JSONValue parse(final String json) {
        if (json == null || json.isEmpty()) {
            return null;
        }
        return JSONParser.parseStrict(json);
    }

    /** Returns the value as a JSON object, or null. */
    public static JSONObject getObject(final JSONValue v) {
        return v == null ? null : v.isObject();
    }

    /** Returns the value as a JSON array, or null. */
    public static JSONArray getArray(final JSONValue v) {
        return v == null ? null : v.isArray();
    }

    /** Returns the value as a Java string, or null when not a JSON string. */
    public static String getString(final JSONValue v) {
        if (v == null) {
            return null;
        }
        final JSONString str = v.isString();
        return str == null ? null : str.stringValue();
    }

    /** Returns the value truncated to an Integer, or null when not numeric. */
    public static Integer getInteger(final JSONValue v) {
        if (v == null) {
            return null;
        }
        final JSONNumber num = v.isNumber();
        return num == null ? null : Integer.valueOf((int) num.doubleValue());
    }

    /** Returns the value as a Double, or null when not numeric. */
    public static Double getDouble(final JSONValue v) {
        if (v == null) {
            return null;
        }
        final JSONNumber num = v.isNumber();
        return num == null ? null : Double.valueOf(num.doubleValue());
    }

    /**
     * Converts a JSON array of strings into a String[]; non-string entries
     * become null elements, and a non-array input yields an empty array.
     */
    public static String[] getStrings(final JSONValue v) {
        final JSONArray array = getArray(v);
        if (array == null) {
            return new String[0];
        }
        final String[] strings = new String[array.size()];
        for (int i = 0; i < array.size(); i++) {
            strings[i] = getString(array.get(i));
        }
        return strings;
    }
}
| gchq/stroom | stroom-core-client/src/main/java/stroom/util/client/JSONUtil.java | Java | apache-2.0 | 2,742 |
package com.nbsp.materialfilepicker.ui;
import android.content.Context;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.fragment.app.Fragment;
import androidx.recyclerview.widget.LinearLayoutManager;
import com.nbsp.materialfilepicker.R;
import com.nbsp.materialfilepicker.filter.FileFilter;
import com.nbsp.materialfilepicker.utils.FileUtils;
import com.nbsp.materialfilepicker.widget.EmptyRecyclerView;
import java.io.File;
import static java.util.Objects.requireNonNull;
/**
 * Fragment showing the contents of a single directory in a RecyclerView
 * and forwarding file clicks to the hosting activity via
 * {@link FileClickListener}.
 */
public class DirectoryFragment extends Fragment {
    private static final String ARG_FILE = "arg_file_path";
    private static final String ARG_FILTER = "arg_filter";

    // Shown by the recycler view when the directory listing is empty.
    private View mEmptyView;
    // Directory displayed by this fragment (from ARG_FILE).
    private File mFile;
    // Filter applied to the directory listing (from ARG_FILTER).
    private FileFilter mFilter;
    private EmptyRecyclerView mDirectoryRecyclerView;
    private DirectoryAdapter mDirectoryAdapter;
    // Host callback; set in onAttach, cleared in onDetach.
    private FileClickListener mFileClickListener;

    @Override
    public void onAttach(@NonNull Context context) {
        super.onAttach(context);
        // The hosting activity must implement FileClickListener; the cast
        // fails fast (ClassCastException) if it does not.
        mFileClickListener = (FileClickListener) context;
    }

    @Override
    public void onDetach() {
        super.onDetach();
        // Drop the activity reference to avoid leaking it.
        mFileClickListener = null;
    }

    /**
     * Creates a fragment for the given directory and filter, packed into
     * the fragment's arguments bundle.
     */
    static DirectoryFragment getInstance(
            File file,
            FileFilter filter
    ) {
        final DirectoryFragment instance = new DirectoryFragment();
        final Bundle args = new Bundle();
        args.putSerializable(ARG_FILE, file);
        args.putSerializable(ARG_FILTER, filter);
        instance.setArguments(args);
        return instance;
    }

    @Nullable
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        final View view = inflater.inflate(R.layout.fragment_directory, container, false);
        mDirectoryRecyclerView = view.findViewById(R.id.directory_recycler_view);
        mEmptyView = view.findViewById(R.id.directory_empty_view);
        return view;
    }

    @Override
    public void onViewCreated(@NonNull View view, Bundle savedInstanceState) {
        super.onViewCreated(view, savedInstanceState);
        // Arguments must be unpacked before the list is built.
        initArgs();
        initFilesList();
    }

    // Builds the adapter from the (filtered) directory listing and wires
    // click events through to the host activity.
    private void initFilesList() {
        mDirectoryAdapter = new DirectoryAdapter(FileUtils.getFileList(mFile, mFilter));
        mDirectoryAdapter.setOnItemClickListener(new ThrottleClickListener() {
            @Override
            void onItemClickThrottled(View view, int position) {
                // Listener may already be detached; guard against NPE.
                if (mFileClickListener != null) {
                    mFileClickListener.onFileClicked(mDirectoryAdapter.getModel(position));
                }
            }
        });
        mDirectoryRecyclerView.setLayoutManager(new LinearLayoutManager(getActivity()));
        mDirectoryRecyclerView.setAdapter(mDirectoryAdapter);
        mDirectoryRecyclerView.setEmptyView(mEmptyView);
    }

    // Unpacks the directory and filter from the arguments bundle.
    private void initArgs() {
        final Bundle arguments = requireNonNull(getArguments());
        if (arguments.containsKey(ARG_FILE)) {
            mFile = (File) getArguments().getSerializable(ARG_FILE);
        }
        mFilter = (FileFilter) getArguments().getSerializable(ARG_FILTER);
    }

    // Implemented by the hosting activity to receive file selections.
    interface FileClickListener {
        void onFileClicked(File clickedFile);
    }
}
| nbsp-team/MaterialFilePicker | library/src/main/java/com/nbsp/materialfilepicker/ui/DirectoryFragment.java | Java | apache-2.0 | 3,395 |
/*
* Copyright 2009 Aleksandar Seovic
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.seovic.core.factory;
import com.seovic.core.Factory;
import java.util.LinkedHashMap;
import java.util.Map;
/**
* {@link Factory} implementation that creates a <tt>java.util.LinkedHashMap</tt>
* instance.
*
* @author Aleksandar Seovic 2010.11.08
*/
public class LinkedHashMapFactory<K, V>
extends AbstractFactory<Map<K, V>> {
private static final long serialVersionUID = -2766923385818267291L;
/**
* {@inheritDoc}
*/
@Override
public Map<K, V> create() {
return new LinkedHashMap<K, V>();
}
} | aseovic/coherence-tools | core/src/main/java/com/seovic/core/factory/LinkedHashMapFactory.java | Java | apache-2.0 | 1,163 |
/*
* Copyright 2011 Vasily Shiyan
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package see.evaluation.processors;
import see.evaluation.ValueProcessor;
import see.util.Reduce;
import javax.annotation.Nullable;
import static java.util.Arrays.asList;
import static see.util.Reduce.fold;
/**
 * A {@link ValueProcessor} that pipes a value through a chain of
 * processors, feeding each one the result of its predecessor.
 */
public class AggregatingProcessor implements ValueProcessor {

    private final Iterable<? extends ValueProcessor> processors;

    private AggregatingProcessor(Iterable<? extends ValueProcessor> processors) {
        this.processors = processors;
    }

    @Override
    public Object apply(@Nullable Object input) {
        // Left-fold the input through the processor chain.
        return fold(input, processors, new Reduce.FoldFunction<ValueProcessor, Object>() {
            @Override
            public Object apply(Object accumulated, ValueProcessor processor) {
                return processor.apply(accumulated);
            }
        });
    }

    /** Chains the given processors in declaration order. */
    public static ValueProcessor concat(ValueProcessor... processors) {
        return new AggregatingProcessor(asList(processors));
    }

    /** Chains the given processors in iteration order. */
    public static ValueProcessor concat(Iterable<? extends ValueProcessor> processors) {
        return new AggregatingProcessor(processors);
    }
}
| xicmiah/see | src/main/java/see/evaluation/processors/AggregatingProcessor.java | Java | apache-2.0 | 1,669 |
var Canvas = require('canvas');
/**
 * Generates a 6-character numeric captcha code.
 *
 * BUGFIX: the previous `('' + Math.random()).substr(3, 6)` could return
 * codes shorter than 6 characters (e.g. `Math.random()` -> 0.5 -> "")
 * and codes containing '.', 'e' or '-' when the number stringified in
 * exponential notation. Build the code digit-by-digit instead.
 *
 * @returns {string} exactly six random decimal digits
 */
function generateCode() {
  var code = '';
  for (var i = 0; i < 6; i++) {
    code += Math.floor(Math.random() * 10);
  }
  return code;
}
/**
 * Renders a captcha image onto the response and stores the code in the
 * session (when one exists).
 *
 * NOTE(review): this mutates the caller's `params` object in place; the
 * middleware reuses one params object across requests, so the derived
 * fields are recomputed idempotently each call — confirm before refactoring.
 *
 * @param {object} req    request; `req.session.captcha` receives the code
 * @param {object} res    response; receives the PNG buffer via res.end
 * @param {object} params color/background/width/height/text options
 */
function generateImage(req, res, params) {
  // Fill in defaults and derived layout metrics.
  params.color = params.color || 'rgb(0, 0, 0)';
  params.background = params.background || 'rgb(255, 255, 255)';
  params.width = params.width || 250;
  params.height = params.height || 150;
  params.innerWidth = params.width * 0.6;
  params.fontHeight = params.height * 0.6;
  params.offset = params.width * 0.08;
  params.fontWidth = Math.ceil(params.fontHeight / 2);
  // Random horizontal shift so glyph placement varies per image.
  var offset = params.width * 0.4 * Math.random();
  var canvas = new Canvas(params.width, params.height);
  var ctx = canvas.getContext('2d');
  ctx.antialias = 'gray';
  // Background fill.
  ctx.fillStyle = params.background;
  ctx.fillRect(0, 0, params.width, params.height);
  ctx.fillStyle = params.color;
  ctx.lineWidth = params.fontHeight / 10;
  ctx.strokeStyle = params.color;
  ctx.font = params.fontHeight + 'px sans';
  // Draw two random bezier strokes as distortion lines.
  for (var i = 0; i < 2; i++) {
    ctx.moveTo(offset, Math.random() * params.innerWidth);
    ctx.bezierCurveTo(
      params.width * 0.32,
      Math.random() * params.height,
      params.width * 0.64,
      Math.random() * params.height,
      params.width * 0.92,
      Math.random() * params.height);
    ctx.stroke();
  }
  var text = params.text || generateCode();
  // Draw each character with a random skew/scale transform.
  for (i = 0; i < text.length; i++) {
    ctx.setTransform(Math.random() * 0.5 + 1, Math.random() * 0.4, Math.random() * 0.4, Math.random() * 0.5 + 1, params.fontWidth * i + offset, params.height * 2 / 3);
    ctx.fillText(text.charAt(i), 0, 0);
  }
  canvas.toBuffer(function(err, buf) {
    // Remember the code for later verification, then send the image bytes.
    if(req.session)
      req.session.captcha = text;
    res.end(buf);
  });
}
module.exports = function(params){
if (params.hasOwnProperty('text'))
delete params.text;
return function(req, res, next){
generateImage(req, res, params);
};
};
module.exports.generateImage = generateImage;
module.exports.generateCode = generateCode;
| klesh/kaptcha | kaptcha.js | JavaScript | apache-2.0 | 1,961 |
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Threading.Tasks;
namespace ChurchWebsite.Presentation.Models.AccountViewModels
{
/// <summary>
/// View model backing the account login form.
/// </summary>
public class LoginViewModel
{
    /// <summary>User's e-mail address; required and validated as an e-mail.</summary>
    [Required]
    [EmailAddress]
    public string Email { get; set; }

    /// <summary>User's password; required, rendered as a password input.</summary>
    [Required]
    [DataType(DataType.Password)]
    public string Password { get; set; }

    /// <summary>State of the "Remember me?" checkbox.</summary>
    [Display(Name = "Remember me?")]
    public bool RememberMe { get; set; }
}
}
| Yodilicious/ChurchWebsite | src/ChurchWebsite/ChurchWebsite.Presentation/Models/AccountViewModels/LoginViewModel.cs | C# | apache-2.0 | 530 |
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Diagnostics;
using System.Drawing;
using System.Linq;
using System.Reflection;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using System.Windows.Forms;
using Next.Accounts_Client.Application_Space;
using Next.Accounts_Client.Controllers;
using Next.Accounts_Client.Controllers.Realize_Classes;
using Next.Accounts_Server.Application_Space;
using Next.Accounts_Server.Controllers;
using Next.Accounts_Server.Extensions;
using Next.Accounts_Server.Models;
using Next.Accounts_Server.Web_Space;
using Next.Accounts_Server.Web_Space.Model;
using IResponseListener = Next.Accounts_Server.Web_Space.IResponseListener;
using Next.Accounts_Server.Web_Space.Realize_Classes;
namespace Next.Accounts_Client
{
public partial class Form1 : Form, IEventListener, ITrackerListener, IResponseListener
{
// Sends HTTP requests to the accounts server (wired in InitSettings).
private IRequestSender _requestSender;
// Launches external processes (e.g. the game client).
private IProcessLauncher _processLauncher;
// Tracks launched processes and reports through ITrackerListener.
private ProcessTracker _processTracker;
// Reports account "in use" status via the request sender.
private IUsingTracker _usingTracker;
private ILogger _logger;
// Settings read from (or created at) Const.SettingsFilename.
private ClientSettings _clientSettings;
// Identity (version + center name) attached to every server request.
private Sender _sender;
// Account currently issued to this client; null until one arrives.
private Account _account = null;
// Command-line arguments received by the form's constructor.
private readonly string[] _arguments = null;
private string _gameCode = null;
// Set when a request is in flight / connection established.
private bool _connectionActive = false;
private bool _badConnectionOrDenied = false;
/// <summary>
/// Builds the main form, starts settings initialization and stores the
/// command-line arguments for later use.
/// </summary>
public Form1(string[] args)
{
    InitializeComponent();
    // NOTE(review): InitSettings is async void and is not awaited, so the
    // form may finish constructing before initialization completes.
    InitSettings();
    _arguments = args;
}
/// <summary>
/// Loads client settings from disk (writing a default settings file on
/// first run), then wires up the request sender, process tracker and
/// usage tracker, and shows the assembly version in the UI.
/// </summary>
private async void InitSettings()
{
    var stringSettings = await IoController.ReadFileAsync(Const.SettingsFilename);
    if (stringSettings == null)
    {
        // First run: persist defaults so the file exists next time.
        _clientSettings = new ClientSettings();
        await IoController.WriteToFileAsync(Const.SettingsFilename, _clientSettings.ToJson());
    }
    else { _clientSettings = stringSettings.ParseJson<ClientSettings>(); }
    var version = Assembly.GetExecutingAssembly().GetName().Version.ToString();
    _sender = Const.GetSender(version: version, centerName: _clientSettings.CenterName);
    _logger = new DefaultLogger();
    // This form acts as both the event listener and the response listener.
    _requestSender = new WebClientController(this, this, _clientSettings.IpAddress);
    _processLauncher = new DefaultProcessLauncher(this);
    _processTracker = new ProcessTracker(_clientSettings)
    {
        EventListener = this,
        ProcessLauncher = _processLauncher,
        TrackerListener = this
    };
    _usingTracker = new DefaultUsingTracker(_requestSender, _sender);
    // The local is reused: it now holds the display string for the label.
    version = $"Version {Assembly.GetExecutingAssembly().GetName().Version.ToString()}";
    VersionLabel.Text = version;
}
private async void RequestAccount(bool noVacBan = false)
{
StartProgressBar();
var api = new ApiMessage
{
Code = 200,
JsonObject = null,
RequestType = Const.RequestTypeGet,
StringMessage = "GameComputer",
JsonSender = _sender.ToJson(),
VacBanFree = noVacBan
};
_connectionActive = true;
var sendPostDataAsync = _requestSender?.SendPostDataAsync(api);
var result = sendPostDataAsync != null && await sendPostDataAsync;
}
private void button1_Click(object sender, EventArgs e) => RequestAccount();
private void DisplayText(string text) => LogTextBox.Text += $"[{DateTime.Now}] {text}\r\n";
private void DisplayInMainlabel(string text) => MainLabel.Text = text;
public void OnEvent(string message, object sender = null)
{
DisplayText(message);
_logger.Log(message);
}
public void OnException(Exception ex)
{
DisplayText(ex.Message);
_logger.LogError(ex.Message);
}
public void OnEvent(string message)
{
throw new NotImplementedException();
}
private async void button2_Click(object sender, EventArgs e)
{
var result = await ReleaseAccount();
}
private async Task<bool> ReleaseAccount()
{
_account.ComputerName = "";
var api = new ApiMessage
{
Code = 200,
JsonObject = _account.ToJson(),
RequestType = Const.RequestTypeRelease,
StringMessage = "GameComputer",
JsonSender = _sender.ToJson()
};
_account = null;
_connectionActive = true;
var result = await _requestSender.SendPostDataAsync(api);
return result;
}
private void button4_Click(object sender, EventArgs e)
{
}
private void Notify(string message, string title = "Steam launcher", int timeout = 3)
{
notifyIcon.ShowBalloonTip(timeout, title, message, ToolTipIcon.Info);
}
public void LaunchSteam(Account account, string applicationCode)
{
var info = new ProcessStartInfo
{
FileName = _clientSettings.SteamDirectory,
Arguments = $"-applaunch {applicationCode} -login {account.Login} {account.Password}"
};
var result = _processLauncher.StartProcess(info);
if (!result) OnSteamClosed();
}
public void OnAccountReleased(Account account)
{
//throw new NotImplementedException();
}
public void OnSteamStarted()
{
WindowState = FormWindowState.Minimized;
//notifyIcon.Visible = true;
Notify("Steam запущен");
}
public async void OnSteamClosed()
{
bool result = false;
while (!result)
{
result = await ReleaseAccount();
}
Notify("Steam закрыт");
CloseApplication();
}
public void OnServerResponse(string responseString)
{
_connectionActive = false;
OkButton.Enabled = true;
ApiMessage apiResponse = responseString.ParseJson<ApiMessage>();
if (apiResponse == null)
{
DisplayText($"Received null apiResponse: {responseString}");
DisplayInMainlabel(_clientSettings.BadConnectionMessage);
_badConnectionOrDenied = true;
return;
}
string displayMessage = null;
var requestType = Const.GetRequestType(apiResponse);
if (apiResponse.Code == 404)
{
displayMessage = $"Received responseCode={apiResponse.Code}. String message: {apiResponse.StringMessage}";
DisplayText(displayMessage);
_badConnectionOrDenied = true;
if (requestType == ApiRequests.GetAccount)
{
DisplayInMainlabel(_clientSettings.NoAvailableAccountsMessage);
}
return;
}
string jsonObject = null;
Account account = null;
var sender = apiResponse.JsonSender.ParseJson<Sender>();
switch (requestType)
{
case ApiRequests.GetAccount:
jsonObject = apiResponse.JsonObject;
account = jsonObject?.ParseJson<Account>();
if (account == null)
{
displayMessage = "null account data";
break;
}
_account = account;
_account.CenterOwner = _clientSettings.CenterName;
_account.ComputerName = _sender.Name;
displayMessage = $"Account {_account} received. Sender {sender}";
DisplayInMainlabel(_clientSettings.OkayMessage);
_usingTracker.SetAccount(_account);
// Launch steam if an account has been received
LaunchSteam(_account, _gameCode);
break;
case ApiRequests.ReleaseAccount:
jsonObject = apiResponse.JsonObject;
account = jsonObject?.ParseJson<Account>();
_account = null;
displayMessage = account != null ?
$"Account {account} has been released. Sender {sender}" :
$"null account data while ReleaseAccount processing";
DisplayInMainlabel(_clientSettings.ReleasedMessage);
_usingTracker.ClearAccount();
// Closing the application if the account has been released
CloseApplication();
break;
case ApiRequests.UsingAccount:
jsonObject = apiResponse.JsonObject;
account = jsonObject?.ParseJson<Account>();
if (account != null && _account == null) _account = account;
displayMessage = account != null ?
$"Account {_account} time has been reset. Sender {sender}" :
$"null account data while UsingAccount processing";
break;
case ApiRequests.Unknown:
case ApiRequests.None:
default:
displayMessage = "No ways inside SWITCH statement. Sender {sender}";
break;
}
DisplayText(displayMessage);
StopProgressBar();
}
public void OnConnectionError(Exception ex)
{
DisplayInMainlabel(_clientSettings.BadConnectionMessage);
StopProgressBar();
OkButton.Enabled = true;
_badConnectionOrDenied = true;
}
private void progresBarTimer_Tick(object sender, EventArgs e)
{
var curValue = progressBar.Value;
curValue += 5;
if (curValue >= progressBar.Maximum)
{
StartProgressBar();
return;
}
progressBar.Value = curValue;
}
private void StartProgressBar()
{
progressBar.Value = progressBar.Minimum;
progresBarTimer.Enabled = true;
}
private void StopProgressBar()
{
progressBar.Value = progressBar.Maximum;
progresBarTimer.Enabled = false;
}
private void Form1_Shown(object sender, EventArgs e)
{
OkButton.Enabled = false;
_processLauncher?.CloseProcesses(_clientSettings?.ProcessName);
var noVacBan = false;
if (_arguments != null)
{
_gameCode = _arguments.Length >= 1 ? _arguments[0] : "0";
if (_clientSettings?.VacBanGames != null)
{
noVacBan = _clientSettings.VacBanGames.Any(i => i == _gameCode);
}
else if (_gameCode == "730") noVacBan = true;
var title = _arguments.Length >= 2 ? _arguments[1] : "Steam launcher";
this.Text = title;
}
RequestAccount(noVacBan);
}
private void CloseApplication()
{
Application.Exit();
}
private void MainButton(object sender, EventArgs e)
{
CloseApplication();
}
private void notifyIcon_MouseClick(object sender, MouseEventArgs e)
{
WindowState = FormWindowState.Normal;
//notifyIcon.Visible = false;
}
private async void Form1_FormClosing(object sender, FormClosingEventArgs e)
{
var reason = e.CloseReason;
if (reason == CloseReason.WindowsShutDown || _badConnectionOrDenied)
{
if (_account != null)
{
await ReleaseAccount();
}
_processLauncher.CloseProcesses(_clientSettings.ProcessName);
return;
}
if (reason == CloseReason.ApplicationExitCall)
{
if (_account == null)
{
_processLauncher.CloseProcesses(_clientSettings.ProcessName);
return;
}
}
e.Cancel = true;
WindowState = FormWindowState.Minimized;
}
private void Form1_Load(object sender, EventArgs e)
{
groupBox1.Visible = false;
}
}
}
| maximgorbatyuk/Next.Accounts-Server | Next.Accounts Client/Form1.cs | C# | apache-2.0 | 13,016 |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using Microsoft.AspNetCore.Authentication.Cookies;
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.DependencyInjection;
using RazorPagesWebSite.Conventions;
namespace RazorPagesWebSite
{
    /// <summary>
    /// Test-site startup that configures cookie authentication and Razor Pages
    /// with page/folder/area authorization conventions.
    /// </summary>
    public class StartupWithBasePath
    {
        private readonly IWebHostEnvironment _hostingEnvironment;
        public StartupWithBasePath(IWebHostEnvironment hostingEnvironment)
        {
            _hostingEnvironment = hostingEnvironment;
        }
        /// <summary>
        /// Registers cookie auth (login page at /Login) and MVC with Razor Pages
        /// conventions: protected pages/folders/areas plus one anonymous exception.
        /// </summary>
        public void ConfigureServices(IServiceCollection services)
        {
            services.AddAuthentication(CookieAuthenticationDefaults.AuthenticationScheme)
                .AddCookie(options => options.LoginPath = "/Login");
            var builder = services.AddMvc()
                .AddCookieTempDataProvider()
                .AddRazorPagesOptions(options =>
                {
                    options.Conventions.AuthorizePage("/Conventions/Auth");
                    options.Conventions.AuthorizeFolder("/Conventions/AuthFolder");
                    options.Conventions.AuthorizeAreaFolder("Accounts", "/RequiresAuth");
                    options.Conventions.AllowAnonymousToAreaPage("Accounts", "/RequiresAuth/AllowAnonymous");
                    options.Conventions.Add(new CustomModelTypeConvention());
                });
        }
        /// <summary>
        /// Builds the request pipeline. Middleware order matters:
        /// static files → routing → authentication → authorization → endpoints.
        /// </summary>
        public void Configure(IApplicationBuilder app)
        {
            app.UseStaticFiles();
            app.UseRouting();
            app.UseAuthentication();
            app.UseAuthorization();
            app.UseEndpoints(endpoints =>
            {
                endpoints.MapControllerRoute("areaRoute", "{area:exists}/{controller=Home}/{action=Index}");
                endpoints.MapRazorPages();
            });
        }
    }
}
| aspnet/AspNetCore | src/Mvc/test/WebSites/RazorPagesWebSite/StartupWithBasePath.cs | C# | apache-2.0 | 1,980 |
package Armadillo.Communication.Impl.Distributed;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import org.joda.time.DateTime;
import org.joda.time.Minutes;
import org.joda.time.Seconds;
import Armadillo.Core.Logger;
import Armadillo.Core.ObjectWrapper;
import Armadillo.Communication.Impl.Topic.SubscriberCallbackDel;
import Armadillo.Communication.Impl.Topic.TopicConstants;
import Armadillo.Communication.Impl.Topic.TopicMessage;
import Armadillo.Communication.Impl.Topic.TopicPublisherCache;
import Armadillo.Communication.Impl.Topic.TopicSubscriberCache;
import Armadillo.Core.Concurrent.ThreadWorker;
import Armadillo.Core.Math.RollingWindowStdDev;
import Armadillo.Core.SelfDescribing.ASelfDescribingClass;
import Armadillo.Core.SelfDescribing.SelfDescribingClass;
import Armadillo.Core.Text.StringHelper;
/**
 * Controller-side heart-beat manager for distributed workers.
 * <p>
 * Subscribes to worker heart-beat and jobs-to-do topics, periodically pings
 * workers, tracks their last ping time, marks workers disconnected when pings
 * stop arriving, and re-queues/flushes jobs tied to dead workers or
 * disconnected requestors.
 */
public class DistControllerToWorkerHeartBeat {
	// Worker id -> connection status (EnumDistributed.Connected/Disconnected as String).
	public ConcurrentHashMap<String, String> WorkersStatus;
	// Rolling window of observed ping round-trip latencies in seconds.
	public RollingWindowStdDev PingLatencySecs;
	// Worker id -> time of the last heart-beat received from that worker.
	public ConcurrentHashMap<String, DateTime> WorkersPingTimes;
	private String m_strControllerId;
	private DistController m_distController;
	// Background worker driving the periodic ping/alive-check loop.
	private ThreadWorker<ObjectWrapper> m_clockThreadWorker;
	// Job id -> last time a worker reported the job as in progress.
	public ConcurrentHashMap<String, DateTime> WorkersJobsInProgress;
	/**
	 * Wires up topic subscriptions for worker heart-beats and in-progress job
	 * notifications, then starts the background clock loop.
	 *
	 * @param distController owning controller; supplies topics and server name
	 */
	public DistControllerToWorkerHeartBeat(DistController distController)
	{
		try
		{
			WorkersJobsInProgress = new ConcurrentHashMap<String, DateTime>();
			m_distController = distController;
			m_strControllerId = distController.ControllerId;
			PingLatencySecs = new RollingWindowStdDev(20);
			WorkersPingTimes = new ConcurrentHashMap<String, DateTime>();
			WorkersStatus = new ConcurrentHashMap<String, String>();
			String strTopic = m_distController.GridTopic + EnumDistributed.TopicWorkerToControllerHeartBeat.toString();
			TopicSubscriberCache.GetSubscriber(
					distController.ServerName,
					TopicConstants.PUBLISHER_HEART_BEAT_PORT).Subscribe(
				strTopic,
				new SubscriberCallbackDel(){
					public void invoke(TopicMessage topicMessage) {
						OnTopicWorkerToControllerHeartBeat(topicMessage);
					};}
				);
			TopicSubscriberCache.GetSubscriber(
					distController.ServerName,
					TopicConstants.PUBLISHER_HEART_BEAT_PORT).Subscribe(
						EnumDistributed.WorkerJobsToDoTopic.toString(),
				new SubscriberCallbackDel(){
					public void invoke(TopicMessage topicMessage) {
						try{
							// Message payload is the job id; record it as freshly in progress.
							String strJobId = (String)topicMessage.EventData;
							WorkersJobsInProgress.put(strJobId, DateTime.now());
						}
						catch(Exception ex){
							Logger.log(ex);
						}
					};}
				);
			m_clockThreadWorker = new ThreadWorker<ObjectWrapper>(){
				@Override
				public void runTask(ObjectWrapper item) {
					try{
						// Drive the clock tick every ~3 seconds, forever.
						while(true)
						{
							try{
								OnClockTick();
							}
							catch(Exception ex){
								Logger.log(ex);
							}
							finally{
								Thread.sleep(3000);
							}
						}
					}
					catch(Exception ex){
						Logger.log(ex);
					}
				}
			};
			m_clockThreadWorker.work();
		}
		catch (Exception ex)
		{
			Logger.log(ex);
		}
	}
	/**
	 * Handles a heart-beat reply from a worker: updates latency statistics,
	 * logs first-time connections and refreshes the worker's last-ping time.
	 */
	private void OnTopicWorkerToControllerHeartBeat(TopicMessage topicmessage)
	{
		try
		{
			ASelfDescribingClass workerResponse = (ASelfDescribingClass)(topicmessage.EventData);
			String strWorkerId = workerResponse.GetStrValue(EnumDistributed.WorkerId);
			DateTime timeSent = new DateTime(workerResponse.GetDateValue(EnumDistributed.TimeControllerToWorker));
			DateTime now = DateTime.now();
			// Round-trip time from controller ping to worker reply, in seconds.
			PingLatencySecs.Update(
					Seconds.secondsBetween(timeSent, now).getSeconds());
			if (!WorkersPingTimes.containsKey(strWorkerId))
			{
				String strMessage = "Connected worker [" + strWorkerId + "]";
				DistGuiHelper.PublishControllerLog(m_distController, strMessage);
			}
			WorkersPingTimes.put(strWorkerId, now);
		}
		catch (Exception ex)
		{
			Logger.log(ex);
		}
	}
	/**
	 * Main loop body (despite the name it loops forever, sleeping
	 * PING_WORKER_TIME_SECS between iterations): pings workers, removes dead
	 * ones and flushes in-progress job records older than one hour.
	 */
	private void OnClockTick()
	{
		DistGuiHelper.PublishControllerLog(m_distController, "Started worker pinger...");
		while (true)
		{
			try
			{
				PingWorker();
				CheckAliveWorkers();
				//
				// flush old jobs in progress
				//
				ArrayList<String> keysToDelete = new ArrayList<String>();
				for(Entry<String, DateTime> kvp : WorkersJobsInProgress.entrySet()){
					int intMinutes = Minutes.minutesBetween(
							kvp.getValue(),
							DateTime.now()).getMinutes();
					if(intMinutes > 60){
						keysToDelete.add(kvp.getKey());
					}
				}
				for(String strKey : keysToDelete){
					WorkersJobsInProgress.remove(strKey);
				}
			}
			catch (Exception ex)
			{
				Logger.log(ex);
			}
			try {
				Thread.sleep(1000 * DistConstants.PING_WORKER_TIME_SECS);
			} catch (InterruptedException e) {
				Logger.log(e);
			}
		}
	}
	/**
	 * Checks whether a given job is still being worked on; if the job has not
	 * been refreshed for more than 120 seconds the owning worker is considered
	 * dead and is removed.
	 */
	public void CheckWorkersJobsInProgress(
			String strJobId,
			String strWorkerId) {
		try{
			if(!WorkersJobsInProgress.containsKey(strJobId)){
				WorkersJobsInProgress.put(strJobId, DateTime.now());
			}
			DateTime lastPingTime = WorkersJobsInProgress.get(strJobId);
			int intTotalSeconds = Seconds.secondsBetween(lastPingTime, DateTime.now()).getSeconds();
			if(intTotalSeconds > 120){
				//
				// job is no longer being done by worker
				//
				RemoveWorker(strWorkerId, intTotalSeconds);
				WorkersPingTimes.remove(strWorkerId);
			}
		}
		catch(Exception ex){
			Logger.log(ex);
		}
	}
	/**
	 * Marks workers whose last ping is older than ALIVE_WORKER_TIME_SECS as
	 * disconnected (removing them), and the rest as connected.
	 */
	private void CheckAliveWorkers()
	{
		try
		{
			DateTime now = DateTime.now();
			// ConcurrentHashMap iteration tolerates concurrent remove() calls below.
			for (Map.Entry<String, DateTime> kvp : WorkersPingTimes.entrySet())
			{
				int intTotalSeconds = (int) Seconds.secondsBetween(kvp.getValue(), now).getSeconds();
				if (intTotalSeconds > DistConstants.ALIVE_WORKER_TIME_SECS)
				{
					RemoveWorker(kvp.getKey(), intTotalSeconds);
					WorkersPingTimes.remove(kvp.getKey());
				}
				else
				{
					WorkersStatus.put(kvp.getKey(), EnumDistributed.Connected.toString());
				}
			}
		}
		catch (Exception ex)
		{
			Logger.log(ex);
		}
	}
	/**
	 * Flushes queued jobs whose requestor has disconnected. Jobs without a
	 * requestor name are flushed as well. For each flushed job a
	 * "client disconnected" result is attached and its worker mapping removed.
	 *
	 * @param strRequestorName name of the disconnected requesting client
	 */
	public void RemoveJobsInProgressFromRequestor(String strRequestorName)
	{
		try
		{
			Set<Entry<String, ASelfDescribingClass>> jobsInProgressArr;
			synchronized (m_distController.DistControllerJobPull.JobsInProgressLock)
			{
				jobsInProgressArr = m_distController.JobsToDoMap.entrySet();
			}
			for (Entry<String, ASelfDescribingClass> kvp : jobsInProgressArr)
			{
				boolean blnDoRemove = false;
				String strJobId = kvp.getKey();
				ASelfDescribingClass currParams = kvp.getValue();
				String strCurrRequestorName = currParams.TryGetStrValue(
						EnumDistributed.RequestorName);
				if (!StringHelper.IsNullOrEmpty(strCurrRequestorName))
				{
					if (strCurrRequestorName.equals(strRequestorName))
					{
						blnDoRemove = true;
					}
				}
				else
				{
					// No requestor recorded: treat as orphaned and remove.
					blnDoRemove = true;
				}
				if (blnDoRemove)
				{
					synchronized (m_distController.DistControllerJobPull.JobsInProgressLock)
					{
						ASelfDescribingClass resultTsEv;
						if (m_distController.JobsToDoMap.containsKey(
								kvp.getKey()))
						{
							resultTsEv = m_distController.JobsToDoMap.get(
									kvp.getKey());
							m_distController.JobsToDoMap.remove(
									kvp.getKey());
							String strMessage = "Calc engine successfully flushed job [" + strJobId +
									"] from client [" + strRequestorName + "]";
							SelfDescribingClass resultObj = new SelfDescribingClass();
							resultObj.SetClassName(
									getClass().getName() + "_ResultFlush");
							DistGuiHelper.PublishControllerLog(
									m_distController,
									strMessage);
							resultObj.SetBlnValue(
									EnumCalcCols.IsClientDisconnected,
									true);
							resultObj.SetStrValue(
									EnumCalcCols.Error,
									strMessage);
							resultTsEv.SetObjValueToDict(
									EnumCalcCols.Result,
									resultObj);
						}
						if(m_distController.DistControllerJobPull.MapJobIdToWorkerId.containsKey(
								strJobId)){
							ASelfDescribingClass jobLog = m_distController.DistControllerJobPull.MapJobIdToWorkerId.get(
									strJobId);
							m_distController.DistControllerJobPull.MapJobIdToWorkerId.remove(
									strJobId);
							DistGuiHelper.PublishJobLogStatus(
									m_distController,
									jobLog,
									"Removed");
						}
					}
				}
			}
		}
		catch (Exception ex)
		{
			Logger.log(ex);
		}
	}
	/**
	 * Returns true when the worker is believed connected. Workers never seen
	 * before are reported as connected — presumably so fresh workers are not
	 * rejected before their first heart-beat (TODO confirm intent).
	 */
	public boolean IsWorkerConnected(String strWorkerId)
	{
		if(!WorkersStatus.containsKey(strWorkerId))
		{
			return true;
		}
		String workerStatus = WorkersStatus.get(strWorkerId);
		return workerStatus.equals(EnumDistributed.Connected.toString());
	}
	/**
	 * Marks a worker as disconnected and releases every job currently assigned
	 * to it, publishing status updates for the GUI.
	 *
	 * @param strWorkerId     id of the worker being removed
	 * @param intTotalSeconds seconds since the worker's last ping (for logging)
	 */
	private void RemoveWorker(
			String strWorkerId,
			int intTotalSeconds)
	{
		try
		{
			DistGuiHelper.PublishControllerLog(
					m_distController,
					"Disconnected worker[" +
							strWorkerId + "][" + intTotalSeconds + "]secs");
			WorkersStatus.put(strWorkerId, EnumDistributed.Disconnected.toString());
			List<String> assignedJobs = new ArrayList<String>();
			for(Entry<String, ASelfDescribingClass> kvp2 :
				m_distController.DistControllerJobPull.MapJobIdToWorkerId.entrySet()){
				if(DistControllerJobLogger.GetWorkerId(kvp2.getValue()).equals(strWorkerId)){
					assignedJobs.add(kvp2.getKey());
				}
			}
//					(from n in m_distController.DistControllerJobPull.MapJobIdToWorkerId
//									  where DistControllerJobLogger.GetWorkerId(n.Value).Equals(strWorkerId)
//									  select n.Key).ToList();
			for(String strJobId : assignedJobs)
			{
				if(m_distController.DistControllerJobPull.MapJobIdToWorkerId.containsKey(strJobId))
				{
					ASelfDescribingClass jobLog =
							m_distController.DistControllerJobPull.MapJobIdToWorkerId.get(strJobId);
					m_distController.DistControllerJobPull.MapJobIdToWorkerId.remove(
							strJobId);
					DistGuiHelper.PublishJobLogStatus(
							m_distController,
							jobLog,
							"ClientDisconnected");
					DistGuiHelper.PublishControllerLog(m_distController,
							"Removed worker[" +
									strWorkerId + "]. Job id [" +
									strJobId +"]");
				}
			}
		}
		catch (Exception ex)
		{
			Logger.log(ex);
		}
	}
	/**
	 * Broadcasts a heart-beat message to all workers on the
	 * controller-to-worker topic, time-stamped for latency measurement.
	 */
	private void PingWorker()
	{
		try
		{
			if(m_distController.DistTopicQueue == null)
			{
				return;
			}
			SelfDescribingClass calcParams = new SelfDescribingClass();
			calcParams.SetClassName(EnumDistributed.HeartBeatWorkerClass);
			calcParams.SetStrValue(
					EnumDistributed.ControllerId,
					m_strControllerId);
			calcParams.SetDateValue(
					EnumDistributed.TimeControllerToWorker,
					DateTime.now().toDate());
			calcParams.SetDateValue(
					EnumDistributed.Time,
					DateTime.now().toDate());
			String strTopic = m_distController.GridTopic +
					EnumDistributed.TopicControllerToWorkerHeartBeat.toString();
			TopicPublisherCache.GetPublisher(
					m_distController.ServerName,
					TopicConstants.SUBSCRIBER_HEART_BEAT_PORT).SendMessageImmediately(
				calcParams,
				strTopic);
		}
		catch (Exception ex)
		{
			Logger.log(ex);
		}
	}
	/**
	 * Releases all held state: clears maps, disposes the latency window and
	 * stops the background clock worker.
	 */
	public void Dispose()
	{
		if(WorkersStatus != null)
		{
			WorkersStatus.clear();
			WorkersStatus = null;
		}
		if(PingLatencySecs != null)
		{
			PingLatencySecs.Dispose();
			PingLatencySecs = null;
		}
		if(WorkersPingTimes != null)
		{
			WorkersPingTimes.clear();
			WorkersPingTimes = null;
		}
		m_distController = null;
		if(m_clockThreadWorker != null)
		{
			m_clockThreadWorker.Dispose();
			m_clockThreadWorker = null;
		}
	}
}
| camachohoracio/Armadillo.Core | Communication/src/main/java/Armadillo/Communication/Impl/Distributed/DistControllerToWorkerHeartBeat.java | Java | apache-2.0 | 15,386 |
# -*- coding: UTF-8 -*-
import hashlib
import base64
import datetime
import urllib2
import json
class TemplateSMS:
    """Minimal client for the CCP/Yuntongxun REST SMS API (Python 2, urllib2).

    Improvements over the original: the duplicated timestamp/MD5-signature
    code is factored into ``_sign()``, and the bare ``except:`` clauses are
    narrowed to ``except Exception`` so KeyboardInterrupt/SystemExit are no
    longer swallowed. Public interface and return values are unchanged.
    """

    account_sid = ''
    account_token = ''
    app_id = ''
    server_ip = ''
    server_port = ''
    soft_version = ''
    timestamp = ''

    def set_account(self, account_sid, token):
        # Credentials issued by the SMS provider.
        self.account_sid = account_sid
        self.account_token = token

    def __init__(self, ip, port, version):
        # REST endpoint location and API version segment of the URL.
        self.server_ip = ip
        self.server_port = port
        self.soft_version = version

    def set_app_id(self, app_id):
        self.app_id = app_id

    def _sign(self):
        """Refresh self.timestamp and return the upper-case MD5 request signature.

        The signature is MD5(sid + token + timestamp) per the provider's auth
        scheme; the same timestamp must also be used for the Authorization
        header, which is why it is stored on the instance.
        """
        now_date = datetime.datetime.now()
        self.timestamp = now_date.strftime("%Y%m%d%H%M%S")
        sig = hashlib.md5()
        sig.update(self.account_sid + self.account_token + self.timestamp)
        return sig.hexdigest().upper()

    def _auth_header(self):
        """Return the base64 Authorization header value (sid:timestamp)."""
        src = self.account_sid + ":" + self.timestamp
        return base64.encodestring(src).strip()

    def send_template_sms(self, to, random, valid_min, temp_id):
        """Send a template SMS (e.g. a verification code).

        :param to: recipient phone number
        :param random: first template substitution (e.g. the code)
        :param valid_min: second template substitution (validity in minutes)
        :param temp_id: provider-side template id
        :returns: decoded JSON response dict, or ``{'172001': 'network error'}``
                  on any network/parse failure (original behaviour preserved).
        """
        sig = self._sign()
        url = "https://" + self.server_ip + ":" + self.server_port + "/" + self.soft_version + "/Accounts/" + \
              self.account_sid + "/SMS/TemplateSMS?sig=" + sig
        req = urllib2.Request(url)
        b = '["%s","%s"]' % (random, valid_min)
        body = '''{"to": "%s", "datas": %s, "templateId": "%s", "appId": "%s"}''' % (to, b, temp_id, self.app_id)
        req.add_data(body)
        req.add_header("Authorization", self._auth_header())
        req.add_header("Accept", 'application/json;')
        req.add_header("Content-Type", "application/json;charset=utf-8;")
        req.add_header("Host", "127.0.0.1")
        req.add_header("content-length", len(body))
        try:
            res = urllib2.urlopen(req)
            data = res.read()
            res.close()
            return json.loads(data)
        except Exception:
            # Best-effort error contract kept from the original implementation.
            return {'172001': 'network error'}

    def query_account_info(self):
        """Query account information (e.g. balance).

        :returns: decoded JSON response dict, or ``{"statusCode": '172001'}``
                  on any network/parse failure (original behaviour preserved).
        """
        sig = self._sign()
        url = "https://" + self.server_ip + ":" + self.server_port + "/" + self.soft_version + "/Accounts/" + \
              self.account_sid + "/AccountInfo?sig=" + sig
        req = urllib2.Request(url)
        req.add_header("Accept", "application/json")
        req.add_header("Content-Type", "application/jsoncharset=utf-8")
        req.add_header("Authorization", self._auth_header())
        try:
            res = urllib2.urlopen(req)
            data = res.read()
            res.close()
            return json.loads(data)
        except Exception:
            return {"statusCode": '172001'}
| davidvon/pipa-pay-server | admin/sms/sdk.py | Python | apache-2.0 | 2,956 |
/*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2022 DBeaver Corp and others
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.dbeaver.model.navigator;
import org.jkiss.code.NotNull;
import java.util.List;
/**
* DBNNodeExtendable
*/
/**
 * DBNNodeExtendable
 *
 * Navigator node that can carry additional ("extra") child nodes beyond its
 * regular model children.
 */
public interface DBNNodeExtendable
{
    /** Returns the list of extra nodes attached to this node (never null). */
    @NotNull
    List<DBNNode> getExtraNodes();

    /**
     * Attaches an extra node.
     *
     * @param node    node to add
     * @param reflect presumably whether listeners/UI should be notified — TODO confirm against implementations
     */
    void addExtraNode(@NotNull DBNNode node, boolean reflect);

    /** Detaches a previously added extra node. */
    void removeExtraNode(@NotNull DBNNode node);
} | dbeaver/dbeaver | plugins/org.jkiss.dbeaver.model/src/org/jkiss/dbeaver/model/navigator/DBNNodeExtendable.java | Java | apache-2.0 | 987 |
package com.faravy.icare;
import java.util.ArrayList;
import android.app.ActionBar;
import android.app.Activity;
import android.content.ContentProviderOperation;
import android.content.Intent;
import android.content.OperationApplicationException;
import android.graphics.Color;
import android.graphics.drawable.ColorDrawable;
import android.net.Uri;
import android.os.Bundle;
import android.os.RemoteException;
import android.provider.ContactsContract;
import android.provider.ContactsContract.CommonDataKinds.Phone;
import android.provider.ContactsContract.CommonDataKinds.StructuredName;
import android.provider.ContactsContract.Data;
import android.provider.ContactsContract.RawContacts;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.TextView;
import android.widget.Toast;
import com.faravy.database.DoctorDataSource;
import com.faravy.modelclass.Doctor;
/**
 * Read-only doctor detail screen: loads one Doctor record by id (passed via
 * the "mId" intent extra) and offers call / SMS / email / add-to-contacts
 * actions for the displayed doctor.
 */
public class ViewDoctorActivity extends Activity {
	Doctor mDoctor;
	DoctorDataSource mDataSource;
	TextView mEtName;
	TextView mEtDetail;
	TextView mEtDate;
	TextView mEtPhone;
	TextView mEtEmail;
	// Record id received from the launching activity.
	String mID = "";
	@Override
	protected void onCreate(Bundle savedInstanceState) {
		super.onCreate(savedInstanceState);
		setContentView(R.layout.activity_view_doctor);
		// Tint the action bar to the app's blue.
		ActionBar ab = getActionBar();
		ColorDrawable colorDrawable = new ColorDrawable(
				Color.parseColor("#0080FF"));
		ab.setBackgroundDrawable(colorDrawable);
		mEtName = (TextView) findViewById(R.id.addName);
		mEtDetail = (TextView) findViewById(R.id.addDetails);
		mEtDate = (TextView) findViewById(R.id.addAppointment);
		mEtPhone = (TextView) findViewById(R.id.addPhone);
		mEtEmail = (TextView) findViewById(R.id.addEmail);
		// Load the record identified by the intent extra and populate the views.
		Intent mActivityIntent = getIntent();
		mID = mActivityIntent.getStringExtra("mId");
		mDataSource = new DoctorDataSource(this);
		mDoctor = mDataSource.singleDoctor(mID);
		String name = mDoctor.getmName();
		String detail = mDoctor.getmDetails();
		String date = mDoctor.getmAppoinment();
		String phone = mDoctor.getmPhone();
		String email = mDoctor.getmEmail();
		mEtName.setText(name);
		mEtDetail.setText(detail);
		mEtDate.setText(date);
		mEtPhone.setText(phone);
		mEtEmail.setText(email);
	}
	/** Dials the doctor's number directly (requires CALL_PHONE permission). */
	public void makeCall(View v) {
		String number = mEtPhone.getText().toString().trim();
		Intent callIntent = new Intent(Intent.ACTION_CALL, Uri.parse("tel:"
				+ number));
		startActivity(callIntent);
	}
	/** Opens the SMS app pre-addressed to the doctor's number. */
	public void sendSms(View v) {
		String number = mEtPhone.getText().toString().trim();
		Intent smsIntent = new Intent(Intent.ACTION_VIEW, Uri.parse("sms:"
				+ number));
		startActivity(smsIntent);
	}
	/** Opens an email client chooser pre-addressed to the doctor's email. */
	public void sendEmail(View v) {
		String email = mEtEmail.getText().toString();
		/*Intent emailIntent = new Intent(Intent.ACTION_SEND,
				Uri.parse("mailto:"));
		emailIntent.setType("text/plain");
		emailIntent.putExtra(Intent.EXTRA_EMAIL, email);
		emailIntent.putExtra(Intent.EXTRA_SUBJECT, "Your subject");
		emailIntent.putExtra(Intent.EXTRA_TEXT, "Email message goes here");
		startActivity(Intent.createChooser(emailIntent, "Send mail..."));*/
		Intent intent = new Intent(Intent.ACTION_SENDTO, Uri.fromParts(
				"mailto", email, null));
		startActivity(Intent.createChooser(intent, "Send email..."));
	}
	/**
	 * Inserts the doctor as a device contact (name + mobile number) via a
	 * ContentProvider batch, then opens the Contacts app.
	 */
	public void addToContact(View v) {
		ArrayList<ContentProviderOperation> ops = new ArrayList<ContentProviderOperation>();
		int rawContactInsertIndex = ops.size();
		// Create the raw contact; subsequent data rows back-reference its index.
		ops.add(ContentProviderOperation.newInsert(RawContacts.CONTENT_URI)
				.withValue(RawContacts.ACCOUNT_TYPE, null)
				.withValue(RawContacts.ACCOUNT_NAME, null).build());
		ops.add(ContentProviderOperation
				.newInsert(Data.CONTENT_URI)
				.withValueBackReference(Data.RAW_CONTACT_ID,
						rawContactInsertIndex)
				.withValue(Data.MIMETYPE, StructuredName.CONTENT_ITEM_TYPE)
				.withValue(StructuredName.DISPLAY_NAME,
						mEtName.getText().toString()) // Name of the
														// person
				.build());
		ops.add(ContentProviderOperation
				.newInsert(Data.CONTENT_URI)
				.withValueBackReference(ContactsContract.Data.RAW_CONTACT_ID,
						rawContactInsertIndex)
				.withValue(Data.MIMETYPE, Phone.CONTENT_ITEM_TYPE)
				.withValue(Phone.NUMBER, mEtPhone.getText().toString()) // Number
																		// of
																		// the
																		// person
				.withValue(Phone.TYPE, Phone.TYPE_MOBILE).build()); // Type of
																	// mobile
																	// number
		try {
			getContentResolver().applyBatch(ContactsContract.AUTHORITY, ops);
			Toast.makeText(getApplicationContext(),
					"Successfully Contract Added !!!!!!!", Toast.LENGTH_LONG)
					.show();
		} catch (RemoteException e) {
			// error
		} catch (OperationApplicationException e) {
			// error
		}
		Intent contacts = new Intent(Intent.ACTION_VIEW,
				ContactsContract.Contacts.CONTENT_URI);
		startActivity(contacts);
	}
	@Override
	public boolean onCreateOptionsMenu(Menu menu) {
		// Inflate the menu; this adds items to the action bar if it is present.
		getMenuInflater().inflate(R.menu.view_doctor, menu);
		return true;
	}
	@Override
	public boolean onOptionsItemSelected(MenuItem item) {
		// Handle action bar item clicks here. The action bar will
		// automatically handle clicks on the Home/Up button, so long
		// as you specify a parent activity in AndroidManifest.xml.
		int id = item.getItemId();
		if (id == R.id.action_settings) {
			return true;
		}
		return super.onOptionsItemSelected(item);
	}
}
| FTFL02-ANDROID/Faravy | iCareFaravy/src/com/faravy/icare/ViewDoctorActivity.java | Java | apache-2.0 | 5,472 |
(function($, utils, $HELPER){
var $app = window.vgrome;
$app.controller('CalendarCtrl', ['$scope', '$compile', 'lang', 'apiProvider', '$controller',
function ($scope, $compile, $lang, $apiProvider, $controller) {
$.extend(this, $controller('EntityCtrl', {$scope: $scope}));
$scope.trans = $lang.translate;
window.calendarCtrl = $scope;
$scope.mode = 'index';
$scope.focusFields = { events: [], tasks: [] };
$scope.focusObject = 'Calendar';
$scope.focusId = '';
$scope.listViewData = { events: [], tasks: [] };
$scope.listViewHeaderColumns = {};
$scope.enableShowRefList = enableShowRefList = function(originObject, originId) {
$scope.listViewData = { events: [], tasks: [] };
$scope.listViewHeaderColumns = { events: userdata.search_config['Events'], tasks: userdata.search_config['Calendar'] };
$apiProvider.findListRef(originObject, originId, 'Calendar', $scope.listViewHeaderColumns, function(result) {
utils.log(originObject+':explode ref list('+$scope.focusObject+'):' + $scope.focusId);
if(result.success) {
_.each(result.records['tasks'], function(record) {
var data = {};
_.each($scope.listViewHeaderColumns['tasks'], function(field) {
var recordValue = record[field.name];
if($HELPER.inArray(field.name, ['date_start', 'due_date'])) {
if(field.name == 'date_start') {
recordValue += ' ' + record['time_start'];
} else if(field.name == 'due_date') {
recordValue += ' ' + record['time_end'];
}
}
data[field.name] = $HELPER.formatValueByField(field, recordValue);
if(arrayContains(field.type.name, ['reference', 'owner'])) {
data[field.name + '_display'] = record[field.name + '_display'];
}
});
data['id'] = record.id;
$scope.listViewData['tasks'].push(data);
});
_.each(result.records['events'], function(record) {
var data = {};
_.each($scope.listViewHeaderColumns['events'], function(field) {
var recordValue = record[field.name];
if($HELPER.inArray(field.name, ['date_start', 'due_date'])) {
if(field.name == 'date_start') {
recordValue += ' ' + record['time_start'];
} else if(field.name == 'due_date') {
recordValue += ' ' + record['time_end'];
}
}
data[field.name] = $HELPER.formatValueByField(field, recordValue);
if(arrayContains(field.type.name, ['reference', 'owner'])) {
data[field.name + '_display'] = record[field.name + '_display'];
}
});
data['id'] = record.id;
$scope.listViewData['events'].push(data);
});
$scope.switchMode('ref-list');
utils.hideLoading();
} else {
utils.handleError(result, 'Calendar');
utils.hideLoading();
}
});
};
// Handles submission of the create form: scrapes every template field's
// value out of the live DOM (.create-section inputs), converts each to
// vtiger wire format via $HELPER, applies per-module defaults, and POSTs
// a new record through $apiProvider.
// NOTE(review): also assigns the implicit global `submitCreate` --
// presumably referenced from inline markup; confirm before removing.
$scope.submitCreate = submitCreate = function() {
    var templateFields = $scope.focusFields;
    var post = {};
    // Walk every field of every block in the create template.
    _.each(templateFields, function(block){
        _.each(block.items, function(field){
            if(field.uitype.name == 'boolean') {
                // Checkboxes: read .prop('checked') and store as 1/0.
                var value = $('.create-section #inp_'+$scope.focusObject+'_'+field.name).prop( "checked" );
                value = value ? 1 : 0;
            } else {
                var value = $('.create-section #inp_'+$scope.focusObject+'_'+field.name).val();
            }
            // date_start/due_date carry a companion time input; pass it to
            // formatToVTFormat so it can build a [date, time] pair.
            if(field.uitype.name == 'datetime' && arrayContains(field.name, ['date_start', 'due_date'])) {
                if(field.name == 'date_start') {
                    var subData = { time: $('.create-section #inp_'+$scope.focusObject+'_time_start').val() };
                } else if(field.name == 'due_date') {
                    var subData = { time: $('.create-section #inp_'+$scope.focusObject+'_time_end').val() };
                }
                value = $HELPER.formatToVTFormat(field, value, subData);
            } else {
                value = $HELPER.formatToVTFormat(field, value);
            }
            post[field.name] = value;
        });
    });
    // The datetime fields come back as [date, time]; split them into the
    // separate keys the API expects.
    // NOTE(review): assumes date_start and due_date always exist in the
    // template -- indexing below throws otherwise. Confirm with templates.
    post['time_start'] = post['date_start'][1];
    post['date_start'] = post['date_start'][0];
    post['time_end'] = post['due_date'][1];
    post['due_date'] = post['due_date'][0];
    // Required defaults differ per module.
    if($scope.focusObject == 'Calendar') {
        if(empty(post['activitytype'])) {
            post['activitytype'] = 'Task';
        }
        if(empty(post['visibility'])) {
            post['visibility'] = 'Public';
        }
    } else if($scope.focusObject == 'Events') {
        if(empty(post['activitytype'])) {
            post['activitytype'] = 'Call';
        }
        if(empty(post['visibility'])) {
            post['visibility'] = 'Public';
        }
    }
    //Calculate duration hours and minutes
    utils.showLoading();
    $apiProvider.createObject($scope.focusObject, post, function(result){
        if(result.success) {
            var record = result.record;
            utils.hideLoading();
            // Jump straight to the detail view of the new record and flag
            // the reference list for reload on the next back navigation.
            $scope.enableShowDetail(record.id);
            $scope.needToReloadRefList = true;
        } else {
            utils.handleError(result, 'Calendar');
            utils.hideLoading();
        }
    });
};
// Handles submission of the edit form: like submitCreate but reads from
// .edit-section inputs, skips empty values (so untouched fields are not
// overwritten), optionally links the record to a parent Lead, and PUTs
// the changes through $apiProvider.updateObject.
// NOTE(review): also assigns the implicit global `submitEdit`.
$scope.submitEdit = submitEdit = function(){
    var templateFields = $scope.focusFields;
    var post = {};
    _.each(templateFields, function(block){
        _.each(block.items, function(field){
            if(field.uitype.name == 'boolean') {
                // Checkboxes: read .prop('checked') and store as 1/0.
                var value = $('.edit-section #inp_'+$scope.focusObject+'_'+field.name).prop( "checked" );
                value = value ? 1 : 0;
            } else {
                var value = $('.edit-section #inp_'+$scope.focusObject+'_'+field.name).val();
            }
            // date_start/due_date carry a companion time input; pass it to
            // formatToVTFormat so it can build a [date, time] pair.
            if(field.uitype.name == 'datetime' && arrayContains(field.name, ['date_start', 'due_date'])) {
                if(field.name == 'date_start') {
                    var subData = { time: $('.edit-section #inp_'+$scope.focusObject+'_time_start').val() };
                } else if(field.name == 'due_date') {
                    var subData = { time: $('.edit-section #inp_'+$scope.focusObject+'_time_end').val() };
                }
                value = $HELPER.formatToVTFormat(field, value, subData);
            } else {
                value = $HELPER.formatToVTFormat(field, value);
            }
            // Unlike submitCreate, empty values are omitted from the update.
            if(value != '') {
                post[field.name] = value;
            }
        });
    });
    // The datetime fields come back as [date, time]; split them into the
    // separate keys the API expects.
    // NOTE(review): if date_start/due_date were empty they were skipped
    // above and this indexing throws -- confirm intended for edit flow.
    post['time_start'] = post['date_start'][1];
    post['date_start'] = post['date_start'][0];
    post['time_end'] = post['due_date'][1];
    post['due_date'] = post['due_date'][0];
    // Required defaults differ per module.
    if($scope.focusObject == 'Calendar') {
        if(empty(post['activitytype'])) {
            post['activitytype'] = 'Task';
        }
        if(empty(post['visibility'])) {
            post['visibility'] = 'Public';
        }
    } else if($scope.focusObject == 'Events') {
        if(empty(post['activitytype'])) {
            post['activitytype'] = 'Call';
        }
        if(empty(post['visibility'])) {
            post['visibility'] = 'Public';
        }
    }
    // When editing from a Lead's summary view, keep the activity linked
    // to that Lead.
    if($scope.focusObject == 'Calendar' || $scope.focusObject == 'Events') {
        if(!empty(window.summaryData) && window.summaryData.type == 'Leads') {
            post['parent_id'] = summaryData.id;
        }
    }
    utils.showLoading();
    $apiProvider.updateObject($scope.focusObject, $scope.focusId, post, function(result){
        if(result.success) {
            var record = result.record;
            utils.hideLoading();
            $scope.enableShowDetail(record.id);
        } else {
            utils.handleError(result, $scope.focusObject);
            utils.hideLoading();
        }
    });
};
// Navigates back from a detail view: returns to the reference list when a
// summary record is in scope (reloading it if flagged), otherwise to the
// index view. Also exposed as the implicit global `backHistory`.
$scope.backHistory = backHistory = function () {
    if (!window.summaryData) {
        $scope.mode = 'index';
        return;
    }
    $scope.mode = 'ref-list';
    if (summaryData && $scope.needToReloadRefList) {
        utils.showLoading('Reloading data...');
        $scope.enableShowRefList(summaryData.type, summaryData.id);
        $scope.needToReloadRefList = false;
    }
};
}]);
})(jQuery, window.UTILS, window.VTEHelperInstance); | vijay-developer/Chrome-Extension | ui/app/controllers/calendar.js | JavaScript | apache-2.0 | 10,975 |
using UnityEngine;

/// <summary>Button handler that shows the music attribution popup.</summary>
public class MusicByButton : MonoBehaviour {
    // Attribution text; identical to the previous inline concatenation.
    private const string ComposerCredits =
        "Music from:\n" +
        "soundcloud.com/laserost,\nsyncopika,\njlwinn8videos on YouTube,\nmarksparling.com";

    /// <summary>Opens the composers popup with a single "Ok" button.</summary>
    public void ShowComposers() {
        PopupManager.Instance.PopUp(ComposerCredits, "Ok");
    }
}
| ZsemberiDaniel/TicTacToe | Assets/Scripts/GUI/MusicByButton.cs | C# | apache-2.0 | 267 |
// Copyright 2012 Google Inc. All Rights Reserved.
/**
* @fileoverview A class representing operations on binary expressions.
*/
goog.provide('wgxpath.BinaryExpr');
goog.require('wgxpath.DataType');
goog.require('wgxpath.Expr');
goog.require('wgxpath.Node');
/**
 * Constructor for BinaryExpr: an XPath expression combining two
 * subexpressions with a binary operator (see wgxpath.BinaryExpr.Op).
 *
 * @param {!wgxpath.BinaryExpr.Op} op A binary operator.
 * @param {!wgxpath.Expr} left The left hand side of the expression.
 * @param {!wgxpath.Expr} right The right hand side of the expression.
 * @extends {wgxpath.Expr}
 * @constructor
 */
wgxpath.BinaryExpr = function(op, left, right) {
  var opCast = /** @type {!wgxpath.BinaryExpr.Op_} */ (op);
  wgxpath.Expr.call(this, opCast.dataType_);

  /**
   * The operator applied to the two operands.
   * @private
   * @type {!wgxpath.BinaryExpr.Op_}
   */
  this.op_ = opCast;

  /**
   * @private
   * @type {!wgxpath.Expr}
   */
  this.left_ = left;

  /**
   * @private
   * @type {!wgxpath.Expr}
   */
  this.right_ = right;

  // Context position/node are needed iff either operand needs them.
  this.setNeedContextPosition(left.doesNeedContextPosition() ||
      right.doesNeedContextPosition());
  this.setNeedContextNode(left.doesNeedContextNode() ||
      right.doesNeedContextNode());

  // Optimize [@id="foo"] and [@name="bar"]: when one side is a quick
  // attribute lookup and the other is a context-free non-nodeset value,
  // record the pair so predicate evaluation can shortcut on the attribute.
  if (this.op_ == wgxpath.BinaryExpr.Op.EQUAL) {
    if (!right.doesNeedContextNode() && !right.doesNeedContextPosition() &&
        right.getDataType() != wgxpath.DataType.NODESET &&
        right.getDataType() != wgxpath.DataType.VOID && left.getQuickAttr()) {
      this.setQuickAttr({
        name: left.getQuickAttr().name,
        valueExpr: right});
    } else if (!left.doesNeedContextNode() && !left.doesNeedContextPosition() &&
        left.getDataType() != wgxpath.DataType.NODESET &&
        left.getDataType() != wgxpath.DataType.VOID && right.getQuickAttr()) {
      this.setQuickAttr({
        name: right.getQuickAttr().name,
        valueExpr: left});
    }
  }
};
goog.inherits(wgxpath.BinaryExpr, wgxpath.Expr);
/**
 * Performs comparison between the left hand side and the right hand side,
 * following the XPath rules for comparing node-sets and primitives.
 *
 * @private
 * @param {function((string|number|boolean), (string|number|boolean))}
 *     comp A comparison function that takes two parameters.
 * @param {!wgxpath.Expr} lhs The left hand side of the expression.
 * @param {!wgxpath.Expr} rhs The right hand side of the expression.
 * @param {!wgxpath.Context} ctx The context to perform the comparison in.
 * @param {boolean=} opt_equChk Whether the comparison checks for equality.
 * @return {boolean} True if comp returns true, false otherwise.
 */
wgxpath.BinaryExpr.compare_ = function(comp, lhs, rhs, ctx, opt_equChk) {
  var left = lhs.evaluate(ctx);
  var right = rhs.evaluate(ctx);
  var lIter, rIter, lNode, rNode;
  // Case 1: nodeset vs nodeset -- true if ANY pair of nodes (one from each
  // side) compares true by string-value.
  if (left instanceof wgxpath.NodeSet && right instanceof wgxpath.NodeSet) {
    lIter = left.iterator();
    for (lNode = lIter.next(); lNode; lNode = lIter.next()) {
      rIter = right.iterator();
      for (rNode = rIter.next(); rNode; rNode = rIter.next()) {
        if (comp(wgxpath.Node.getValueAsString(lNode),
            wgxpath.Node.getValueAsString(rNode))) {
          return true;
        }
      }
    }
    return false;
  }
  // Case 2: nodeset vs primitive -- true if ANY node, converted to the
  // primitive's type, compares true.
  if ((left instanceof wgxpath.NodeSet) ||
      (right instanceof wgxpath.NodeSet)) {
    var nodeset, primitive;
    if ((left instanceof wgxpath.NodeSet)) {
      nodeset = left, primitive = right;
    } else {
      nodeset = right, primitive = left;
    }
    var iter = nodeset.iterator();
    var type = typeof primitive;
    for (var node = iter.next(); node; node = iter.next()) {
      var stringValue;
      switch (type) {
        case 'number':
          stringValue = wgxpath.Node.getValueAsNumber(node);
          break;
        case 'boolean':
          stringValue = wgxpath.Node.getValueAsBool(node);
          break;
        case 'string':
          stringValue = wgxpath.Node.getValueAsString(node);
          break;
        default:
          throw Error('Illegal primitive type for comparison.');
      }
      if (comp(stringValue,
          /** @type {(string|number|boolean)} */ (primitive))) {
        return true;
      }
    }
    return false;
  }
  // Case 3: primitive vs primitive. Equality checks coerce booleans first,
  // then numbers, then fall back to raw comparison; relational checks
  // always compare numerically.
  if (opt_equChk) {
    if (typeof left == 'boolean' || typeof right == 'boolean') {
      return comp(!!left, !!right);
    }
    if (typeof left == 'number' || typeof right == 'number') {
      return comp(+left, +right);
    }
    return comp(left, right);
  }
  return comp(+left, +right);
};
/**
 * Evaluates this binary expression by applying its operator to the two
 * operand subexpressions in the given context.
 * @override
 * @return {(boolean|number)} The boolean or number result.
 */
wgxpath.BinaryExpr.prototype.evaluate = function(ctx) {
  var operator = this.op_;
  return operator.evaluate_(this.left_, this.right_, ctx);
};
/**
 * Returns a human-readable description of this expression subtree: the
 * operator token followed by the indented left and right operands.
 * @override
 */
wgxpath.BinaryExpr.prototype.toString = function() {
  return 'Binary Expression: ' + this.op_ +
      wgxpath.Expr.indent(this.left_) +
      wgxpath.Expr.indent(this.right_);
};
/**
 * A binary operator. Instances are created only through
 * wgxpath.BinaryExpr.createOp_ and exposed publicly as the values of the
 * wgxpath.BinaryExpr.Op enum; dataType_ and evaluate_ are read by
 * wgxpath.BinaryExpr itself.
 *
 * @param {string} opString The operator string.
 * @param {number} precedence The precedence when evaluated.
 * @param {!wgxpath.DataType} dataType The dataType to return when evaluated.
 * @param {function(!wgxpath.Expr, !wgxpath.Expr, !wgxpath.Context)}
 *     evaluate An evaluation function.
 * @constructor
 * @private
 */
wgxpath.BinaryExpr.Op_ = function(opString, precedence, dataType, evaluate) {
  /**
   * The source-level operator token (e.g. 'div', '+', '!=').
   * @private
   * @type {string}
   */
  this.opString_ = opString;

  /**
   * @private
   * @type {number}
   */
  this.precedence_ = precedence;

  /**
   * @private
   * @type {!wgxpath.DataType}
   */
  this.dataType_ = dataType;

  /**
   * @private
   * @type {function(!wgxpath.Expr, !wgxpath.Expr, !wgxpath.Context)}
   */
  this.evaluate_ = evaluate;
};
/**
 * Returns the precedence for the operator. Higher values bind tighter
 * (multiplicative = 6 ... 'or' = 1; see the Op enum).
 *
 * @return {number} The precedence.
 */
wgxpath.BinaryExpr.Op_.prototype.getPrecedence = function() {
  return this.precedence_;
};


/**
 * @override
 * @return {string} The operator token; also the key under which the
 *     operator is registered in stringToOpMap_.
 */
wgxpath.BinaryExpr.Op_.prototype.toString = function() {
  return this.opString_;
};
/**
 * A mapping from operator strings to operator objects. Populated as a side
 * effect of createOp_ and queried by getOp.
 *
 * @private
 * @type {!Object.<string, !wgxpath.BinaryExpr.Op>}
 */
wgxpath.BinaryExpr.stringToOpMap_ = {};
/**
 * Creates a binary operator and registers it in the operator map, throwing
 * if an operator with the same token was already created.
 *
 * @param {string} opString The operator string.
 * @param {number} precedence The precedence when evaluated.
 * @param {!wgxpath.DataType} dataType The dataType to return when evaluated.
 * @param {function(!wgxpath.Expr, !wgxpath.Expr, !wgxpath.Context)}
 *     evaluate An evaluation function.
 * @return {!wgxpath.BinaryExpr.Op} A binary expression operator.
 * @private
 */
wgxpath.BinaryExpr.createOp_ = function(opString, precedence, dataType,
    evaluate) {
  var registry = wgxpath.BinaryExpr.stringToOpMap_;
  if (opString in registry) {
    throw new Error('Binary operator already created: ' + opString);
  }
  // The upcast and then downcast for the JSCompiler.
  var op = /** @type {!Object} */ (new wgxpath.BinaryExpr.Op_(
      opString, precedence, dataType, evaluate));
  op = /** @type {!wgxpath.BinaryExpr.Op} */ (op);
  registry[op.toString()] = op;
  return op;
};
/**
 * Returns the operator with this opString or null if none.
 *
 * @param {string} opString The opString.
 * @return {?wgxpath.BinaryExpr.Op} The operator, or null when no operator
 *     with that token has been registered (the `!` annotation was wrong:
 *     the `|| null` fallback makes the result nullable).
 */
wgxpath.BinaryExpr.getOp = function(opString) {
  return wgxpath.BinaryExpr.stringToOpMap_[opString] || null;
};
/**
 * Binary operator enumeration. The numeric second argument is the
 * operator's precedence (higher binds tighter): multiplicative (6),
 * additive (5), relational (4), equality (3), 'and' (2), 'or' (1).
 *
 * @enum {{getPrecedence: function(): number}}
 */
wgxpath.BinaryExpr.Op = {
  DIV: wgxpath.BinaryExpr.createOp_('div', 6, wgxpath.DataType.NUMBER,
      function(left, right, ctx) {
        return left.asNumber(ctx) / right.asNumber(ctx);
      }),
  MOD: wgxpath.BinaryExpr.createOp_('mod', 6, wgxpath.DataType.NUMBER,
      function(left, right, ctx) {
        return left.asNumber(ctx) % right.asNumber(ctx);
      }),
  MULT: wgxpath.BinaryExpr.createOp_('*', 6, wgxpath.DataType.NUMBER,
      function(left, right, ctx) {
        return left.asNumber(ctx) * right.asNumber(ctx);
      }),
  PLUS: wgxpath.BinaryExpr.createOp_('+', 5, wgxpath.DataType.NUMBER,
      function(left, right, ctx) {
        return left.asNumber(ctx) + right.asNumber(ctx);
      }),
  MINUS: wgxpath.BinaryExpr.createOp_('-', 5, wgxpath.DataType.NUMBER,
      function(left, right, ctx) {
        return left.asNumber(ctx) - right.asNumber(ctx);
      }),
  LESSTHAN: wgxpath.BinaryExpr.createOp_('<', 4, wgxpath.DataType.BOOLEAN,
      function(left, right, ctx) {
        return wgxpath.BinaryExpr.compare_(function(a, b) {return a < b;},
            left, right, ctx);
      }),
  GREATERTHAN: wgxpath.BinaryExpr.createOp_('>', 4, wgxpath.DataType.BOOLEAN,
      function(left, right, ctx) {
        return wgxpath.BinaryExpr.compare_(function(a, b) {return a > b;},
            left, right, ctx);
      }),
  LESSTHAN_EQUAL: wgxpath.BinaryExpr.createOp_(
      '<=', 4, wgxpath.DataType.BOOLEAN,
      function(left, right, ctx) {
        return wgxpath.BinaryExpr.compare_(function(a, b) {return a <= b;},
            left, right, ctx);
      }),
  GREATERTHAN_EQUAL: wgxpath.BinaryExpr.createOp_('>=', 4,
      wgxpath.DataType.BOOLEAN, function(left, right, ctx) {
        return wgxpath.BinaryExpr.compare_(function(a, b) {return a >= b;},
            left, right, ctx);
      }),
  // Equality operators pass opt_equChk so compare_ applies the
  // boolean-before-number coercion rules.
  EQUAL: wgxpath.BinaryExpr.createOp_('=', 3, wgxpath.DataType.BOOLEAN,
      function(left, right, ctx) {
        return wgxpath.BinaryExpr.compare_(function(a, b) {return a == b;},
            left, right, ctx, true);
      }),
  NOT_EQUAL: wgxpath.BinaryExpr.createOp_('!=', 3, wgxpath.DataType.BOOLEAN,
      function(left, right, ctx) {
        // Fixed: terminate the return statement with ';' like every
        // sibling callback (was relying on ASI).
        return wgxpath.BinaryExpr.compare_(function(a, b) {return a != b;},
            left, right, ctx, true);
      }),
  AND: wgxpath.BinaryExpr.createOp_('and', 2, wgxpath.DataType.BOOLEAN,
      function(left, right, ctx) {
        return left.asBool(ctx) && right.asBool(ctx);
      }),
  OR: wgxpath.BinaryExpr.createOp_('or', 1, wgxpath.DataType.BOOLEAN,
      function(left, right, ctx) {
        return left.asBool(ctx) || right.asBool(ctx);
      })
};
| vinay-qa/vinayit-android-server-apk | third_party/js/wgxpath/binaryExpr.js | JavaScript | apache-2.0 | 10,027 |
# encoding: UTF-8
#
# Cookbook Name:: postfix-dovecot
# Attributes:: vmail
# Author:: Xabier de Zuazo (<xabier@onddo.com>)
# Copyright:: Copyright (c) 2013 Onddo Labs, SL. (www.onddo.com)
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Spam filtering: which cookbook provides spamc and whether the spamc
# content filter is enabled.
default['postfix-dovecot']['spamc']['recipe'] = 'onddo-spamassassin'
default['postfix-dovecot']['spamc']['enabled'] = false

# Virtual mail user that owns the mailboxes. The group/gid default to the
# user/uid, so overriding one attribute keeps the pair consistent.
default['postfix-dovecot']['vmail']['user'] = 'vmail'
default['postfix-dovecot']['vmail']['group'] =
  node['postfix-dovecot']['vmail']['user']
default['postfix-dovecot']['vmail']['uid'] = 5000
default['postfix-dovecot']['vmail']['gid'] =
  node['postfix-dovecot']['vmail']['uid']
# Root directory for all virtual mailboxes.
default['postfix-dovecot']['vmail']['home'] = '/var/vmail'
/*
* Copyright (C) 2007-2008 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package jp.sblo.pandora.jota.text;
import android.os.Bundle;
import android.text.Editable;
import android.text.method.KeyListener;
import android.util.Log;
import android.view.inputmethod.BaseInputConnection;
import android.view.inputmethod.CompletionInfo;
import android.view.inputmethod.ExtractedText;
import android.view.inputmethod.ExtractedTextRequest;
/**
 * InputConnection implementation that bridges the IME to an editable
 * {@link TextView}: edits are applied to the view's Editable, and most
 * IME callbacks are forwarded to the corresponding TextView hooks.
 */
public class EditableInputConnection extends BaseInputConnection {
    private static final boolean DEBUG = false;
    private static final String TAG = "EditableInputConnection";
    // Target view; all operations are delegated to it.
    private final TextView mTextView;

    public EditableInputConnection(TextView textview) {
        // `true` marks this connection as targeting an editable widget.
        super(textview, true);
        mTextView = textview;
    }

    /** Returns the view's editable buffer, or null if the view is gone. */
    public Editable getEditable() {
        TextView tv = mTextView;
        if (tv != null) {
            return tv.getEditableText();
        }
        return null;
    }

    /** Groups subsequent edits into one undo/notification batch. */
    public boolean beginBatchEdit() {
        mTextView.beginBatchEdit();
        return true;
    }

    public boolean endBatchEdit() {
        mTextView.endBatchEdit();
        return true;
    }

    /** Clears meta-key state via the view's KeyListener, if any. */
    public boolean clearMetaKeyStates(int states) {
        final Editable content = getEditable();
        if (content == null) return false;
        KeyListener kl = mTextView.getKeyListener();
        if (kl != null) {
            try {
                kl.clearMetaKeyState(mTextView, content, states);
            } catch (AbstractMethodError e) {
                // This is an old listener that doesn't implement the
                // new method.
            }
        }
        return true;
    }

    /** Forwards an IME completion choice to the view inside a batch edit. */
    public boolean commitCompletion(CompletionInfo text) {
        if (DEBUG) Log.v(TAG, "commitCompletion " + text);
        mTextView.beginBatchEdit();
        mTextView.onCommitCompletion(text);
        mTextView.endBatchEdit();
        return true;
    }

    /** Forwards editor actions (e.g. IME_ACTION_DONE) to the view. */
    public boolean performEditorAction(int actionCode) {
        if (DEBUG) Log.v(TAG, "performEditorAction " + actionCode);
        mTextView.onEditorAction(actionCode);
        return true;
    }

    /** Forwards context-menu actions (cut/copy/paste/...) to the view. */
    public boolean performContextMenuAction(int id) {
        if (DEBUG) Log.v(TAG, "performContextMenuAction " + id);
        mTextView.beginBatchEdit();
        mTextView.onTextContextMenuItem(id);
        mTextView.endBatchEdit();
        return true;
    }

    /**
     * Extracts text for a full-screen IME; when the monitor flag is set,
     * registers the request so later changes are reported to the IME.
     */
    public ExtractedText getExtractedText(ExtractedTextRequest request, int flags) {
        if (mTextView != null) {
            ExtractedText et = new ExtractedText();
            if (mTextView.extractText(request, et)) {
                if ((flags&GET_EXTRACTED_TEXT_MONITOR) != 0) {
                    mTextView.setExtracting(request);
                }
                return et;
            }
        }
        return null;
    }

    /** Forwards IME private commands to the view. */
    public boolean performPrivateCommand(String action, Bundle data) {
        mTextView.onPrivateIMECommand(action, data);
        return true;
    }

    @Override
    public boolean commitText(CharSequence text, int newCursorPosition) {
        if (mTextView == null) {
            return super.commitText(text, newCursorPosition);
        }
        // If committing the text did not change the view's error, the old
        // error is stale -- clear it so it does not linger after typing.
        CharSequence errorBefore = mTextView.getError();
        boolean success = super.commitText(text, newCursorPosition);
        CharSequence errorAfter = mTextView.getError();
        if (errorAfter != null && errorBefore == errorAfter) {
            mTextView.setError(null, null);
        }
        return success;
    }
}
| jiro-aqua/JotaTextEditor | app/src/main/java/jp/sblo/pandora/jota/text/EditableInputConnection.java | Java | apache-2.0 | 4,042 |
package cz.znj.kvr.sw.exp.java.netty.netty;
/*
* Copyright 2012 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelHandlerInvoker;
import io.netty.channel.ChannelPromise;
import io.netty.channel.DefaultChannelPromise;
import io.netty.channel.EventLoop;
import io.netty.channel.EventLoopGroup;
import io.netty.util.concurrent.AbstractScheduledEventExecutor;
import io.netty.util.concurrent.EventExecutor;
import io.netty.util.concurrent.Future;
import java.net.SocketAddress;
import java.util.ArrayDeque;
import java.util.Queue;
import java.util.concurrent.TimeUnit;
import static io.netty.channel.ChannelHandlerInvokerUtil.invokeBindNow;
import static io.netty.channel.ChannelHandlerInvokerUtil.invokeChannelActiveNow;
import static io.netty.channel.ChannelHandlerInvokerUtil.invokeChannelInactiveNow;
import static io.netty.channel.ChannelHandlerInvokerUtil.invokeChannelReadCompleteNow;
import static io.netty.channel.ChannelHandlerInvokerUtil.invokeChannelReadNow;
import static io.netty.channel.ChannelHandlerInvokerUtil.invokeChannelRegisteredNow;
import static io.netty.channel.ChannelHandlerInvokerUtil.invokeChannelUnregisteredNow;
import static io.netty.channel.ChannelHandlerInvokerUtil.invokeChannelWritabilityChangedNow;
import static io.netty.channel.ChannelHandlerInvokerUtil.invokeCloseNow;
import static io.netty.channel.ChannelHandlerInvokerUtil.invokeConnectNow;
import static io.netty.channel.ChannelHandlerInvokerUtil.invokeDeregisterNow;
import static io.netty.channel.ChannelHandlerInvokerUtil.invokeDisconnectNow;
import static io.netty.channel.ChannelHandlerInvokerUtil.invokeExceptionCaughtNow;
import static io.netty.channel.ChannelHandlerInvokerUtil.invokeFlushNow;
import static io.netty.channel.ChannelHandlerInvokerUtil.invokeReadNow;
import static io.netty.channel.ChannelHandlerInvokerUtil.invokeUserEventTriggeredNow;
import static io.netty.channel.ChannelHandlerInvokerUtil.invokeWriteNow;
/**
 * Single-threaded, manually-driven EventLoop for embedded/testing use:
 * submitted tasks are queued and only executed when {@link #runTasks()} /
 * {@link #runScheduledTasks()} are called by the driver. All handler
 * invocations happen immediately (the *Now variants) since everything is
 * considered to run "in" this loop. Exposed as a process-wide singleton.
 */
public final class MyEmbeddedEventLoop extends AbstractScheduledEventExecutor implements ChannelHandlerInvoker, EventLoop
{
    public static MyEmbeddedEventLoop getInstance()
    {
        return instance;
    }

    // Eagerly-created singleton instance.
    private static MyEmbeddedEventLoop instance = new MyEmbeddedEventLoop();

    // Pending tasks; drained FIFO by runTasks().
    private final Queue<Runnable> tasks = new ArrayDeque<Runnable>(2);

    @Override
    public EventLoop unwrap() {
        return this;
    }

    @Override
    public EventLoopGroup parent() {
        return (EventLoopGroup) super.parent();
    }

    @Override
    public EventLoop next() {
        return (EventLoop) super.next();
    }

    @Override
    public void execute(Runnable command) {
        if (command == null) {
            throw new NullPointerException("command");
        }
        // Tasks are only queued here; nothing runs until runTasks().
        tasks.add(command);
    }

    /** Runs every currently queued task (including ones they enqueue). */
    void runTasks() {
        for (;;) {
            Runnable task = tasks.poll();
            if (task == null) {
                break;
            }
            task.run();
        }
    }

    /**
     * Runs all scheduled tasks that are due "now" and returns the
     * nanosecond deadline of the next pending scheduled task.
     */
    long runScheduledTasks() {
        long time = AbstractScheduledEventExecutor.nanoTime();
        for (;;) {
            Runnable task = pollScheduledTask(time);
            if (task == null) {
                return nextScheduledTaskNano();
            }
            task.run();
        }
    }

    long nextScheduledTask() {
        return nextScheduledTaskNano();
    }

    // Widened to package visibility is not needed; just expose super's impl.
    @Override
    protected void cancelScheduledTasks() {
        super.cancelScheduledTasks();
    }

    // Lifecycle management is unsupported: this loop lives for the whole
    // process and is driven externally.
    @Override
    public Future<?> shutdownGracefully(long quietPeriod, long timeout, TimeUnit unit) {
        throw new UnsupportedOperationException();
    }

    @Override
    public Future<?> terminationFuture() {
        throw new UnsupportedOperationException();
    }

    @Override
    @Deprecated
    public void shutdown() {
        throw new UnsupportedOperationException();
    }

    @Override
    public boolean isShuttingDown() {
        return false;
    }

    @Override
    public boolean isShutdown() {
        return false;
    }

    @Override
    public boolean isTerminated() {
        return false;
    }

    @Override
    public boolean awaitTermination(long timeout, TimeUnit unit) {
        return false;
    }

    @Override
    public ChannelFuture register(Channel channel) {
        return register(channel, new DefaultChannelPromise(channel, this));
    }

    @Override
    public ChannelFuture register(Channel channel, ChannelPromise promise) {
        channel.unsafe().register(this, promise);
        return promise;
    }

    // Every caller is treated as being on this loop, so handler methods
    // are always invoked inline.
    @Override
    public boolean inEventLoop() {
        return true;
    }

    @Override
    public boolean inEventLoop(Thread thread) {
        return true;
    }

    @Override
    public ChannelHandlerInvoker asInvoker() {
        return this;
    }

    @Override
    public EventExecutor executor() {
        return this;
    }

    // ChannelHandlerInvoker: delegate every event straight to the
    // synchronous *Now helpers (no rescheduling onto another thread).
    @Override
    public void invokeChannelRegistered(ChannelHandlerContext ctx) {
        invokeChannelRegisteredNow(ctx);
    }

    @Override
    public void invokeChannelUnregistered(ChannelHandlerContext ctx) {
        invokeChannelUnregisteredNow(ctx);
    }

    @Override
    public void invokeChannelActive(ChannelHandlerContext ctx) {
        invokeChannelActiveNow(ctx);
    }

    @Override
    public void invokeChannelInactive(ChannelHandlerContext ctx) {
        invokeChannelInactiveNow(ctx);
    }

    @Override
    public void invokeExceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
        invokeExceptionCaughtNow(ctx, cause);
    }

    @Override
    public void invokeUserEventTriggered(ChannelHandlerContext ctx, Object event) {
        invokeUserEventTriggeredNow(ctx, event);
    }

    @Override
    public void invokeChannelRead(ChannelHandlerContext ctx, Object msg) {
        invokeChannelReadNow(ctx, msg);
    }

    @Override
    public void invokeChannelReadComplete(ChannelHandlerContext ctx) {
        invokeChannelReadCompleteNow(ctx);
    }

    @Override
    public void invokeChannelWritabilityChanged(ChannelHandlerContext ctx) {
        invokeChannelWritabilityChangedNow(ctx);
    }

    @Override
    public void invokeBind(ChannelHandlerContext ctx, SocketAddress localAddress, ChannelPromise promise) {
        invokeBindNow(ctx, localAddress, promise);
    }

    @Override
    public void invokeConnect(
            ChannelHandlerContext ctx,
            SocketAddress remoteAddress, SocketAddress localAddress, ChannelPromise promise) {
        invokeConnectNow(ctx, remoteAddress, localAddress, promise);
    }

    @Override
    public void invokeDisconnect(ChannelHandlerContext ctx, ChannelPromise promise) {
        invokeDisconnectNow(ctx, promise);
    }

    @Override
    public void invokeClose(ChannelHandlerContext ctx, ChannelPromise promise) {
        invokeCloseNow(ctx, promise);
    }

    @Override
    public void invokeDeregister(ChannelHandlerContext ctx, final ChannelPromise promise) {
        invokeDeregisterNow(ctx, promise);
    }

    @Override
    public void invokeRead(ChannelHandlerContext ctx) {
        invokeReadNow(ctx);
    }

    @Override
    public void invokeWrite(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) {
        invokeWriteNow(ctx, msg, promise);
    }

    @Override
    public void invokeFlush(ChannelHandlerContext ctx) {
        invokeFlushNow(ctx);
    }
}
| kvr000/zbynek-java-exp | netty-exp/netty5-exp/netty5-datagram-listener-exp/src/main/java/cz/znj/kvr/sw/exp/java/netty/netty/MyEmbeddedEventLoop.java | Java | apache-2.0 | 7,288 |
// Copyright 2017 Pilosa Corp.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package cmd
import (
"context"
"io"
"github.com/pilosa/pilosa"
"github.com/pilosa/pilosa/ctl"
"github.com/spf13/cobra"
)
// Importer is the package-level handle to the import subcommand; it is
// (re)assigned each time newImportCommand builds the command so tests and
// callers can inspect/override its fields before execution.
var Importer *ctl.ImportCommand

// newImportCommand runs the Pilosa import subcommand for ingesting bulk data.
func newImportCommand(stdin io.Reader, stdout, stderr io.Writer) *cobra.Command {
	Importer = ctl.NewImportCommand(stdin, stdout, stderr)
	importCmd := &cobra.Command{
		Use:   "import",
		Short: "Bulk load data into pilosa.",
		Long: `Bulk imports one or more CSV files to a host's index and field. The data
of the CSV file are grouped by shard for the most efficient import.
The format of the CSV file is:
ROWID,COLUMNID,[TIME]
The file should contain no headers. The TIME column is optional and can be
omitted. If it is present then its format should be YYYY-MM-DDTHH:MM.
`,
		RunE: func(cmd *cobra.Command, args []string) error {
			// Positional args are the CSV paths to import.
			Importer.Paths = args
			return Importer.Run(context.Background())
		},
	}

	flags := importCmd.Flags()
	// Target location.
	flags.StringVarP(&Importer.Host, "host", "", "localhost:10101", "host:port of Pilosa.")
	flags.StringVarP(&Importer.Index, "index", "i", "", "Pilosa index to import into.")
	flags.StringVarP(&Importer.Field, "field", "f", "", "Field to import into.")
	// Schema-creation options (only used with --create).
	flags.BoolVar(&Importer.IndexOptions.Keys, "index-keys", false, "Specify keys=true when creating an index")
	flags.BoolVar(&Importer.FieldOptions.Keys, "field-keys", false, "Specify keys=true when creating a field")
	flags.StringVar(&Importer.FieldOptions.Type, "field-type", "", "Specify the field type when creating a field. One of: set, int, time, bool, mutex")
	flags.Int64Var(&Importer.FieldOptions.Min, "field-min", 0, "Specify the minimum for an int field on creation")
	flags.Int64Var(&Importer.FieldOptions.Max, "field-max", 0, "Specify the maximum for an int field on creation")
	flags.StringVar(&Importer.FieldOptions.CacheType, "field-cache-type", pilosa.CacheTypeRanked, "Specify the cache type for a set field on creation. One of: none, lru, ranked")
	flags.Uint32Var(&Importer.FieldOptions.CacheSize, "field-cache-size", 50000, "Specify the cache size for a set field on creation")
	flags.Var(&Importer.FieldOptions.TimeQuantum, "field-time-quantum", "Specify the time quantum for a time field on creation. One of: D, DH, H, M, MD, MDH, Y, YM, YMD, YMDH")
	// Import behavior.
	flags.IntVarP(&Importer.BufferSize, "buffer-size", "s", 10000000, "Number of bits to buffer/sort before importing.")
	flags.BoolVarP(&Importer.Sort, "sort", "", false, "Enables sorting before import.")
	flags.BoolVarP(&Importer.CreateSchema, "create", "e", false, "Create the schema if it does not exist before import.")
	flags.BoolVarP(&Importer.Clear, "clear", "", false, "Clear the data provided in the import.")
	ctl.SetTLSConfig(flags, &Importer.TLS.CertificatePath, &Importer.TLS.CertificateKeyPath, &Importer.TLS.SkipVerify)

	return importCmd
}
| travisturner/pilosa | cmd/import.go | GO | apache-2.0 | 3,454 |
# python 3
# tensorflow 2.0
from __future__ import print_function, division, absolute_import
import os
import argparse
import random
import numpy as np
import datetime
# from numpy import linalg
import os.path as osp
import sys
cur_dir = osp.dirname(osp.abspath(__file__))
sys.path.insert(1, osp.join(cur_dir, '.'))
from sklearn.datasets import load_svmlight_file
from scipy.sparse import csr_matrix
# from scipy.sparse import linalg
import matplotlib
matplotlib.use("Agg")
import matplotlib.pyplot as plt
import tensorflow as tf
from tf_utils import pinv_naive, pinv
path_train = osp.join(cur_dir, "../a9a/a9a")
path_test = osp.join(cur_dir, "../a9a/a9a.t")
MAX_ITER = 100  # default IRLS iteration budget
np_dtype = np.float32
tf_dtype = tf.float32

# Seed all RNGs with a fresh random seed; print it so a run can be replayed.
manualSeed = random.randint(1, 10000)  # fix seed
print("Random Seed: ", manualSeed)
random.seed(manualSeed)
np.random.seed(manualSeed)

# load all data (LIBSVM a9a / Adult dataset: 123 binary features)
X_train, y_train = load_svmlight_file(path_train, n_features=123, dtype=np_dtype)
X_test, y_test = load_svmlight_file(path_test, n_features=123, dtype=np_dtype)
# X: scipy.sparse.csr.csr_matrix
# X_train: (32561, 123), y_train: (32561,)
# X_test: (16281, 123), y_test:(16281,)

# stack a dimension of ones to X to simplify computation (bias term),
# densifying the sparse matrices in the process
N_train = X_train.shape[0]
N_test = X_test.shape[0]
X_train = np.hstack((np.ones((N_train, 1)), X_train.toarray())).astype(np_dtype)
X_test = np.hstack((np.ones((N_test, 1)), X_test.toarray())).astype(np_dtype)
y_train = y_train.reshape((N_train, 1))
y_test = y_test.reshape((N_test, 1))
# label: -1, +1 ==> 0, 1
y_train = np.where(y_train == -1, 0, 1)
y_test = np.where(y_test == -1, 0, 1)
# NB: here X's shape is (N,d), which differs to the derivation
def neg_log_likelihood(w, X, y, L2_param=None):
    r"""Negative (optionally L2-penalized) log-likelihood of logistic regression.

    NB: X is (N, d) here, transposed w.r.t. the usual derivation.
    Raw docstring so the LaTeX backslashes are not escape sequences.

    w: dx1 weight column vector
    X: Nxd design matrix
    y: Nx1 labels in {0, 1} (numpy array; cast to float inside)
    L2_param: \lambda>0, will introduce -\lambda/2 ||w||_2^2
    """
    # log-likelihood: w^T X^T y - sum_i log(1 + exp(x_i^T w))
    res = tf.matmul(tf.matmul(tf.transpose(w), tf.transpose(X)), y.astype(np_dtype)) - \
        tf.reduce_sum(tf.math.log(1 + tf.exp(tf.matmul(X, w))))
    if L2_param != None and L2_param > 0:
        res += -0.5 * L2_param * tf.matmul(tf.transpose(w), w)
    # res is a 1x1 tensor; return the (negated) scalar.
    return -res[0][0]
def prob(X, w):
    """Per-class probabilities [P(y=0|x), P(y=1|x)] for each sample.

    X: Nxd
    w: dx1
    ---
    prob: N x num_classes(2)
    """
    # Broadcasting against y=[0,1] yields 1/(1+e^{Xw}) in column 0 and
    # sigmoid(Xw) in column 1 from a single expression.
    y = tf.constant(np.array([0.0, 1.0]), dtype=tf.float32)
    prob = tf.exp(tf.matmul(X, w) * y) / (1 + tf.exp(tf.matmul(X, w)))
    return prob
def compute_acc(X, y, w):
    """Fraction of samples whose argmax class probability equals the label y."""
    p = prob(X, w)
    y_pred = tf.cast(tf.argmax(p, axis=1), tf.float32)
    y = tf.cast(tf.squeeze(y), tf.float32)
    acc = tf.reduce_mean(tf.cast(tf.equal(y, y_pred), tf.float32))
    return acc
def update(w_old, X, y, L2_param=0):
    """Computes one IRLS (Newton) step for logistic regression.

    w_new = w_old - w_update
    w_update = (X'RX+lambda*I)^(-1) (X'(mu-y) + lambda*w_old)
    lambda is L2_param

    w_old: dx1
    X: Nxd
    y: Nx1
    ---
    w_update: dx1
    """
    d = X.shape[1]
    mu = tf.sigmoid(tf.matmul(X, w_old))  # Nx1 predicted probabilities
    # Diagonal of the IRLS weight matrix R, stored as a flat Nx1 vector.
    R_flat = mu * (1 - mu)  # element-wise, Nx1

    L2_reg_term = L2_param * tf.eye(d)
    # R_flat * X scales each row of X by its weight, so X' (R_flat*X) == X'RX.
    XRX = tf.matmul(tf.transpose(X), R_flat * X) + L2_reg_term  # dxd

    # calculate pseudo inverse via SVD
    # method 1: slightly better than tfp.math.pinv when L2_param=0
    XRX_pinv = pinv_naive(XRX)
    # method 2
    # XRX_pinv = pinv(XRX)

    # w = w - (X^T R X)^(-1) (X^T (mu-y) + lambda*w_old)
    y = tf.cast(y, tf_dtype)
    w_update = tf.matmul(XRX_pinv, tf.matmul(tf.transpose(X), mu - y) + L2_param * w_old)
    return w_update
def optimize(w_old, w_update):
    """Custom update op (instead of SGD variants): w_old <- w_old - w_update; returns the updated variable."""
    return w_old.assign(w_old - w_update)
def train_IRLS(X_train, y_train, X_test=None, y_test=None, L2_param=0, max_iter=MAX_ITER):
    """train Logistic Regression via IRLS algorithm

    Logs train/test accuracy and the loss to TensorBoard each iteration and
    stops early once the previous Newton step's L2 norm drops below 1e-2.

    X: Nxd
    y: Nx1
    L2_param: L2 regularization strength (lambda >= 0)
    max_iter: iteration budget; note `while i <= max_iter` evaluates up to
        max_iter + 1 times (iteration 0 reports the initial state).
    """
    N, d = X_train.shape
    w = tf.Variable(0.01 * tf.ones((d, 1), dtype=tf.float32), name="w")

    current_time = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
    summary_writer = tf.summary.create_file_writer(f"./logs/{current_time}")

    print("start training...")
    print("L2 param(lambda): {}".format(L2_param))
    i = 0
    # iteration
    while i <= max_iter:
        print("iter: {}".format(i))
        neg_L = neg_log_likelihood(w, X_train, y_train, L2_param)
        print("\t neg log likelihood: {}".format(neg_L))

        train_acc = compute_acc(X_train, y_train, w)
        with summary_writer.as_default():
            tf.summary.scalar("train_acc", train_acc, step=i)
            tf.summary.scalar("train_neg_L", neg_L, step=i)
        test_acc = compute_acc(X_test, y_test, w)
        with summary_writer.as_default():
            tf.summary.scalar("test_acc", test_acc, step=i)
        print("\t train acc: {}, test acc: {}".format(train_acc, test_acc))

        L2_norm_w = np.linalg.norm(w.numpy())
        print("\t L2 norm of w: {}".format(L2_norm_w))

        if i > 0:
            # Convergence check on the PREVIOUS step (w_update from the
            # last loop pass); stop when the step became small.
            diff_w = np.linalg.norm(w_update.numpy())
            print("\t diff of w_old and w: {}".format(diff_w))
            if diff_w < 1e-2:
                break
        w_update = update(w, X_train, y_train, L2_param)
        w = optimize(w, w_update)
        i += 1
    print("training done.")
if __name__ == "__main__":
    # Run IRLS with L2 regularization; test_acc should be about 0.85.
    lambda_ = 20  # 0
    train_IRLS(X_train, y_train, X_test, y_test, L2_param=lambda_, max_iter=100)

    # Baseline: sklearn's LogisticRegression on the same preprocessed split.
    from sklearn.linear_model import LogisticRegression
    classifier = LogisticRegression()
    classifier.fit(X_train, y_train.reshape(N_train,))
    y_pred_train = classifier.predict(X_train)
    train_acc = np.sum(y_train.reshape(N_train,) == y_pred_train)/N_train
    print('train_acc: {}'.format(train_acc))
    y_pred_test = classifier.predict(X_test)
    test_acc = np.sum(y_test.reshape(N_test,) == y_pred_test)/N_test
    print('test acc: {}'.format(test_acc))
#!/usr/bin/env ruby
# Build a distributable CotEditor_<version>.dmg from ./CotEditor.app.
# Bails out early (status 0) when the app is missing, fails the Gatekeeper
# assessment, or the target dmg already exists.

require 'fileutils'

app = "./CotEditor.app"

if !Dir.exist?(app)
  puts "Dir not found : #{app}"
  exit
end

# Verify the bundle passes Gatekeeper / code-signing assessment.
if !system "spctl -a -v #{app}"
  puts "Code sign error : #{app}"
  exit
end

# Read the bundle version straight from the app's Info.plist.
version = `/usr/libexec/PlistBuddy -c "Print :CFBundleVersion" #{app}/Contents/Info.plist`.chomp
puts "Version #{version}"

dmg = "./CotEditor_#{version}.dmg"
if File.exist?(dmg)
  puts "Already exist : #{dmg}"
  exit
end

# Stage the app in a scratch directory named after the version.
dmgwork = "./CotEditor_#{version}"
FileUtils.rm_rf(dmgwork)
FileUtils.mkdir(dmgwork)
FileUtils.mv(app, dmgwork)

# Bundle any additional distribution files alongside the app.
extras = "./files"
FileUtils.cp_r("#{extras}/.", dmgwork) if Dir.exist?(extras)

# Compress the staged folder into the final disk image, then clean up.
system "hdiutil create -format UDBZ -srcfolder #{dmgwork} #{dmg}"
FileUtils.rm_rf(dmgwork)
puts "Created dmg for #{version}."
| eric-seekas/CotEditor | release/1_create_dmg.rb | Ruby | apache-2.0 | 836 |
(function()
{
// Search-panel templates, mixed into window.templates by the .apply(...)
// at the end of this closure.
// NOTE(review): MAX_LINE_CHARS is not referenced in this section of the
// file — presumably a cap on rendered source-line length; confirm usage.
const MAX_LINE_CHARS = 4000;
// Matches strings that are empty or whitespace-only.
const RE_WS = /^\s*$/;
this.search_panel = function(search, type, handler)
{
  // Two stacked sections: the advanced controls for this search type
  // (dispatched to advanced_<type>_search), then the container that
  // hosts the live result panel.
  var controls = ['div',
                  this['advanced_' + type + '_search'](search),
                  'class', 'advanced-search-controls'];
  var results = ['div',
                 ['div', 'class', 'panel-search mono'],
                 'class', 'panel-search-container',
                 'handler', handler];
  return [controls, results];
};
this.searchbar_content = function(search)
{
  // Reuse the generic filter template, but host it in a <div> carrying
  // the advanced-panel-search class instead of its default container.
  var template = this.filters(search.controls);
  template[0] = 'div';
  template.push('class');
  template.push('advanced-panel-search');
  return template;
};
this._search_input = function(name, type, value, label,
                              is_selected, is_disabled, title)
{
  // Template for a labelled <input>; checked/disabled state and an
  // optional tooltip (on both the input and its label) are appended
  // only when requested.
  var input = ['input', 'type', type, 'value', value, 'name', name];
  var labelled = ['label', input, label];
  if (is_selected)
  {
    input.push('checked', 'checked');
  }
  if (is_disabled)
  {
    input.push('disabled', 'disabled');
  }
  if (title)
  {
    input.push('title', title);
    labelled.push('title', title);
  }
  return labelled;
};
this.advanced_search_field = function(search)
{
  // Single-row layout table: the free-text filter stretches while the
  // info badge and the two extra search controls shrink to 1px width.
  var row = ['tr',
             ['td', this.default_filter(search.controls[0])],
             ['td',
              ['span', '\u00A0', 'class', 'search-info-badge'],
              'width', '1px'],
             ['td', this.search_control(search.controls[1]), 'width', '1px'],
             ['td', this.search_control(search.controls[2]), 'width', '1px']];
  var table = ['table', row, 'class', 'advanced-search-table'];
  return ['div', table, 'class', 'advanced-search'];
};
this.advanced_dom_search = function(search)
{
  // DOM search panel: the shared search field plus a radio group for the
  // query type and an ignore-case toggle (disabled for token searches).
  var radio_defs = [
    [DOMSearch.PLAIN_TEXT, ui_strings.S_LABEL_SEARCH_TYPE_TEXT],
    [DOMSearch.REGEXP, ui_strings.S_LABEL_SEARCH_TYPE_REGEXP],
    [DOMSearch.CSS, ui_strings.S_LABEL_SEARCH_TYPE_CSS],
    [DOMSearch.XPATH, ui_strings.S_LABEL_SEARCH_TYPE_XPATH],
  ];
  var form = ['form'];
  radio_defs.forEach(function(def)
  {
    form.push(this._search_input('dom-search-type',
                                 'radio',
                                 def[0],
                                 def[1],
                                 def[0] == search.search_type));
  }, this);
  form.push(this._search_input('dom-search-ignore-case',
                               'checkbox',
                               'ignore-case',
                               ui_strings.S_LABEL_SEARCH_FLAG_IGNORE_CASE,
                               search.ignore_case,
                               !search.is_token_search));
  form.push('handler', 'dom-search-type-changed');
  return [this.advanced_search_field(search), ['div', form]];
}.bind(this);
this.advanced_js_search = function(search)
{
  // Script search panel: shared search field plus checkboxes for regexp
  // mode, case folding and the search scope.
  var form = ['form'];
  form.push(this._search_input('js-search-type',
                               'checkbox',
                               'reg-exp',
                               ui_strings.S_LABEL_SEARCH_TYPE_REGEXP,
                               TextSearch.REGEXP == search.search_type));
  form.push(this._search_input('js-search-ignore-case',
                               'checkbox',
                               'ignore-case',
                               ui_strings.S_LABEL_SEARCH_FLAG_IGNORE_CASE,
                               search.ignore_case));
  form.push(this._search_input('js-search-all-files',
                               'checkbox',
                               'search-all-files',
                               ui_strings.S_LABEL_SEARCH_ALL_FILES,
                               search.search_all_files));
  // "Injected scripts" is only meaningful when searching across all files,
  // so it is disabled otherwise.
  form.push(this._search_input('js-search-injected-scripts',
                               'checkbox',
                               'search-injected-scripts',
                               ui_strings.S_LABEL_SEARCH_INJECTED_SCRIPTS,
                               search.search_injected_scripts,
                               !search.search_all_files,
                               ui_strings.S_LABEL_SEARCH_INJECTED_SCRIPTS_TOOLTIP));
  form.push('handler', 'js-search-type-changed');
  return [this.advanced_search_field(search), ['div', form]];
}.bind(this);
this.js_search_window = function()
{
  // Empty result-list container; clicks bubble to the 'show-script' handler.
  var container = ['div'];
  container.push('class', 'js-search-results', 'handler', 'show-script');
  return container;
};
this.js_search_results = function(results, result_count, max_count)
{
  // One section per runtime id: a header followed by the per-script match
  // templates. Rendering stops once the shared match budget (maintained by
  // _search_result_init / search_result_script) has been exhausted.
  var ret = this._search_result_init(result_count, max_count);
  for (var rt_id in results)
  {
    var section = ['div', this._search_result_header(rt_id)];
    section.extend(results[rt_id].map(this.search_result_script, this));
    section.push('class', 'js-search-results-runtime');
    ret.push(section);
    if (this._js_search_ctx.count > this._js_search_ctx.max_count)
    {
      break;
    }
  }
  return ret;
};
this.js_search_result_single_file = function(script, result_count, max_count)
{
  // Like js_search_results, but for a single script with no runtime header.
  var ret = this._search_result_init(result_count, max_count);
  var rendered = this.search_result_script(script);
  ret.push(rendered);
  return ret;
};
this._search_result_init = function(result_count, max_count)
{
  // Reset the shared render budget and, when the result set was truncated,
  // open the output with an info box explaining the cap.
  this._js_search_ctx = {count: 0, max_count: max_count};
  var ret = ['div'];
  if (result_count > max_count)
  {
    var message = ui_strings.S_INFO_TOO_MANY_SEARCH_RESULTS
                    .replace('%(COUNT)s', result_count)
                    .replace('%(MAX)s', max_count);
    ret.push(['div',
              ['div', message, 'class', 'info-box'],
              'class', 'info-box-container']);
  }
  return ret;
};
this._search_result_header = function(rt_id)
{
  // Heading for a runtime section: its title, else its shortened URI,
  // else an empty string when the runtime is unknown.
  var runtime = window.runtimes.getRuntime(rt_id);
  var heading = '';
  if (runtime)
  {
    var display_uri = helpers.shortenURI(runtime.uri);
    heading = runtime.title || display_uri.uri || '';
  }
  return ['h2', heading];
};
this._format_line_no = function(line_no)
{
  // Right-align a line number by prefixing padding selected by the
  // number's digit count; numbers longer than the table get no padding.
  // NOTE(review): the individual padding strings have whitespace-
  // significant widths — confirm against the original file before
  // touching this table.
  line_no = String(line_no);
  var padding = [' ', ' ', ' ', ' ', ' ', ' '];
  return (padding[line_no.length] || '') + line_no;
};
this.resource_link = function(url, text, line)
{
  // Clickable span that opens `url` in the resource tab, optionally
  // jumping to a specific line number.
  var link = ['span', text,
              'handler', 'open-resource-tab',
              'data-resource-url', url,
              'class', 'internal-link'];
  if (line)
  {
    link.push('data-resource-line-number', String(line));
  }
  return link;
};
this.search_result_script = function(script, show_script_uri)
{
  // Render one script's matched lines as highlighted source snippets,
  // preceded by an <h3> identifying the script (URI, inline-in-page, or
  // bare script type). Consumes the shared match budget in
  // this._js_search_ctx: counting stops all rendering once max_count is
  // reached. Pass show_script_uri === false to suppress the heading.
  var ret = ['div'];
  if (this._js_search_ctx.count < this._js_search_ctx.max_count)
  {
    // Heading is rendered unless the caller explicitly passes false
    // (a non-boolean value counts as "show it").
    if (typeof show_script_uri != 'boolean' || show_script_uri)
    {
      var h3 = ['h3'];
      if (script.uri)
      {
        h3.push(this.resource_link(script.uri, script.uri), ':');
      }
      else if (script.script_type == "inline")
      {
        // Inline scripts have no URI of their own; link the page that
        // contains them instead, when its runtime is known.
        var rt = window.runtimes.getRuntime(script.runtime_id);
        if (rt && rt.uri)
        {
          h3.push(script.script_type + " (");
          h3.push(this.resource_link(rt.uri, rt.uri));
          h3.push("):");
        }
      }
      else
      {
        h3.push(script.script_type + ":");
      }
      ret.push(h3);
    }
    var line = 0, cur_line = 0, script_data = '', script_tmpl = null, cur = null;
    for (var i = 0; i < script.line_matches.length; i++)
    {
      // The post-increment both consumes budget and gates rendering.
      if (this._js_search_ctx.count++ < this._js_search_ctx.max_count)
      {
        cur_line = script.line_matches[i];
        // Several matches on the same line collapse to a single snippet.
        if (cur_line != line)
        {
          line = cur_line;
          // line_arr holds per-line character offsets into script_data;
          // state_arr presumably carries the tokenizer state at each line
          // start for mid-source highlighting — confirm against the
          // script model.
          script_data = script.script_data.slice(script.line_arr[line - 1],
                                                 script.line_arr[line]);
          script_tmpl = this.highlight_js_source(script_data,
                                                 null,
                                                 script.state_arr[line - 1],
                                                 ['code'],
                                                 true);
          // A whitespace-only snippet gets a non-breaking space so the
          // row keeps its height.
          if (script_tmpl.length == 2 && RE_WS.test(script_tmpl[1]))
          {
            script_tmpl[1] += "\u00a0";
          }
          // Match extends past the end of this line: mark continuation.
          if (script.line_offsets_length[i] &&
              script.line_offsets[i] + script.line_offsets_length[i] > script.get_line_length(line))
          {
            script_tmpl.push(['span', '…', 'class', 'match-following-line'])
          }
          ret.push(['div',
                    ['span', String(line), 'class', 'line-no'],
                    script_tmpl,
                    'data-line-no', String(line),
                    'class', 'search-match js-search']);
        }
      }
    }
    ret.push('class', 'js-search-results-script js-source',
             'data-script-id', String(script.script_id));
  }
  return ret;
};
}).apply(window.templates || (window.templates = {}));
| operasoftware/dragonfly | src/searches/templates.js | JavaScript | apache-2.0 | 9,442 |